Posted to commits@ambari.apache.org by rl...@apache.org on 2017/10/02 20:39:12 UTC

[01/50] [abbrv] ambari git commit: AMBARI-22052. Zeppelin's conf HDFS directory owner is wrong (Prabhjyot Singh via Venkata Sairam)

Repository: ambari
Updated Branches:
  refs/heads/branch-feature-AMBARI-20859 3cefb74cd -> 17243c68e


AMBARI-22052. Zeppelin's conf HDFS directory owner is wrong (Prabhjyot Singh via Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6a99bd8b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6a99bd8b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6a99bd8b

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 6a99bd8b9723b806c6b099fc3c6836bf96f66a43
Parents: ee618e1
Author: Venkata Sairam <ve...@gmail.com>
Authored: Tue Sep 26 16:05:03 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Tue Sep 26 16:05:03 2017 +0530

----------------------------------------------------------------------
 .../common-services/ZEPPELIN/0.7.0/package/scripts/master.py   | 1 +
 .../src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py   | 6 +++---
 2 files changed, 4 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6a99bd8b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index 382566e..8bdfaec 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -358,6 +358,7 @@ class Master(Script):
                             source=interpreter_config,
                             group=params.zeppelin_group,
                             owner=params.zeppelin_user,
+                            user=params.zeppelin_user,
                             replace_existing_files=True)
 
   def update_kerberos_properties(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/6a99bd8b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index f379f40..1fdbadb 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -337,7 +337,7 @@ class TestZeppelin070(RMFTestCase):
                               hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hdfs_site={u'a': u'b'},
                               kinit_path_local='/usr/bin/kinit',
-                              user="hdfs",
+                              user="zeppelin",
                               owner="zeppelin",
                               group="zeppelin",
                               type='file',
@@ -382,7 +382,7 @@ class TestZeppelin070(RMFTestCase):
                               hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hdfs_site={u'a': u'b'},
                               kinit_path_local='/usr/bin/kinit',
-                              user="hdfs",
+                              user="zeppelin",
                               owner="zeppelin",
                               group="zeppelin",
                               type='file',
@@ -427,7 +427,7 @@ class TestZeppelin070(RMFTestCase):
                               hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hdfs_site={u'a': u'b'},
                               kinit_path_local='/usr/bin/kinit',
-                              user="hdfs",
+                              user="zeppelin",
                               owner="zeppelin",
                               group="zeppelin",
                               type='file',
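
Read together, the two hunks capture the fix: in Ambari's HdfsResource, owner/group describe the HDFS ownership recorded on the created entry, while user is the account under which the agent actually performs the HDFS operations. master.py previously set only owner, so the call fell back to the default execution user (the old test expectations show user="hdfs") and the conf directory came out with the wrong owner; adding user=params.zeppelin_user runs the operations as zeppelin, matching the updated expectations. A minimal sketch of the corrected call shape follows; the leading positional argument is elided in the master.py hunk, so the path name below is a placeholder, and type/action are reconstructed from the test expectations.

    # Hedged reconstruction of the corrected call; zeppelin_conf_target stands
    # in for the HDFS path elided in the hunk above.
    params.HdfsResource(zeppelin_conf_target,
                        type="file",
                        action="create_on_execute",
                        source=interpreter_config,
                        group=params.zeppelin_group,
                        owner=params.zeppelin_user,   # HDFS ownership of the file
                        user=params.zeppelin_user,    # account executing the HDFS ops
                        replace_existing_files=True)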


[37/50] [abbrv] ambari git commit: AMBARI-22083 - Wrong Hadoop Home Directory Is Being Picked Up on MAINT/PATCH Upgraded Clusters (jonathanhurley)

Posted by rl...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index 8c04a79..c1fb5a2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -30,10 +30,26 @@ from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import copy_tarball
 from resource_management.libraries import functions
 from resource_management.core.logger import Logger
+from resource_management.libraries.functions.constants import StackFeature
+from resource_management.libraries.functions import stack_features
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import conf_select
+
+# used for faking out stack features when the config files used by unit tests use older stacks
+def mock_stack_feature(stack_feature, stack_version):
+  if stack_feature == StackFeature.ROLLING_UPGRADE:
+    return True
+  if stack_feature == StackFeature.CONFIG_VERSIONING:
+    return True
+
+  return False
 
 @patch.object(functions, "get_stack_version", new = MagicMock(return_value="2.0.0.0-1234"))
 @patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0,'123','')))
+@patch.object(stack_select, "get_hadoop_dir", new=MagicMock(return_value="mock_hadoop_dir"))
+@patch.object(conf_select, "get_hadoop_conf_dir", new=MagicMock(return_value="/usr/hdp/current/hadoop-client/conf"))
+@patch.object(stack_features, "check_stack_feature", new=MagicMock(side_effect=mock_stack_feature))
 class TestHiveServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -72,17 +88,17 @@ class TestHiveServer(RMFTestCase):
     self.assert_configure_default()
 
     self.assertResourceCalled('Execute',
-                              'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
-                              environment={'PATH': '/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'},
+                              'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
+                              environment={'PATH': '/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'},
                               user='hive'
     )
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': 'mock_hadoop_dir',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
     )
     self.assertResourceCalled('Execute',
                               '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -107,13 +123,13 @@ class TestHiveServer(RMFTestCase):
 
     self.assert_configure_default(default_fs_default='hcfs://c6401.ambari.apache.org:8020')
 
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-                              environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+                              environment = {'HADOOP_HOME': 'mock_hadoop_dir',
                                              'HIVE_BIN': 'hive',
                                              'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
                               not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
                               user = 'hive',
-                              path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+                              path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
                               )
     self.assertResourceCalled('Execute',
                               '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -134,17 +150,17 @@ class TestHiveServer(RMFTestCase):
 
     self.assert_configure_default()
 
-    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
-                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'},
+    self.assertResourceCalled('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
+                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'},
                               user = 'hive',
                               )
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': 'mock_hadoop_dir',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
     )
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -165,17 +181,17 @@ class TestHiveServer(RMFTestCase):
 
     self.assert_configure_default(no_tmp=True)
 
-    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
-                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'},
+    self.assertResourceCalled('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
+                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'},
                               user = 'hive',
                               )
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': 'mock_hadoop_dir',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
     )
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -196,17 +212,17 @@ class TestHiveServer(RMFTestCase):
 
     self.assert_configure_default(no_tmp=True)
 
-    self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
-                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'},
+    self.assertResourceCalled('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
+                              environment = {'PATH': '/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'},
                               user = 'hive',
                               )
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': 'mock_hadoop_dir',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
     )
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -237,7 +253,7 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
       action = ['delete'],
     )
-    
+
     self.assertNoMoreResources()
 
   def test_configure_secured(self):
@@ -273,13 +289,13 @@ class TestHiveServer(RMFTestCase):
     )
 
     self.assert_configure_secured()
-    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_hiveserver2_script /var/log/hive/hive-server2.out /var/log/hive/hive-server2.err /var/run/hive/hive-server.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': 'mock_hadoop_dir',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive-server.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
     )
     self.assertResourceCalled('Execute',
                               '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
@@ -316,7 +332,7 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('File', '/var/run/hive/hive-server.pid',
       action = ['delete'],
     )
-    
+
     self.assertNoMoreResources()
 
   def assert_configure_default(self, no_tmp = False, default_fs_default='hdfs://c6401.ambari.apache.org:8020'):
@@ -368,7 +384,7 @@ class TestHiveServer(RMFTestCase):
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group='hadoop',
-                              conf_dir='/etc/hive/conf.server',
+                              conf_dir='/usr/hdp/current/hive-server2/conf/conf.server',
                               mode=0600,
                               configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                    u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -376,7 +392,7 @@ class TestHiveServer(RMFTestCase):
                               owner='hive',
                               configurations=self.getConfig()['configurations']['hive-site'],
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner='hive',
                               group='hadoop',
@@ -433,7 +449,7 @@ class TestHiveServer(RMFTestCase):
                               content=Template('startHiveserver2.sh.j2'),
                               mode=0755,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hiveserver2.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hiveserver2.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hiveserver2.properties.j2'),
@@ -441,7 +457,7 @@ class TestHiveServer(RMFTestCase):
                               )
     self.assertResourceCalled('XmlConfig', 'hiveserver2-site.xml',
       group = 'hadoop',
-      conf_dir = '/etc/hive/conf.server',
+      conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
       mode = 0600,
       owner = 'hive',
       configuration_attributes = self.getConfig()['configuration_attributes']['hiveserver2-site'],
@@ -451,13 +467,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/webhcat',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         mode = 0755,
@@ -465,13 +481,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/hcat',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         mode = 0755,
@@ -480,13 +496,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         group = 'hadoop',
@@ -495,13 +511,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/hive',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
         mode = 0755,
@@ -510,14 +526,14 @@ class TestHiveServer(RMFTestCase):
       self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
           immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
           security_enabled = False,
-          hadoop_conf_dir = '/etc/hadoop/conf',
+          hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
           keytab = UnknownConfigurationMock(),
           kinit_path_local = '/usr/bin/kinit',
           user = 'hdfs',
           dfs_type = '',
           owner = 'hive',
           group = 'hdfs',
-          hadoop_bin_dir = '/usr/bin',
+          hadoop_bin_dir = 'mock_hadoop_dir',
           type = 'directory',
           action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
           mode = 0777,
@@ -525,13 +541,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='missing_principal', default_fs=default_fs_default,
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
 
   def assert_configure_secured(self):
@@ -577,7 +593,7 @@ class TestHiveServer(RMFTestCase):
     )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group='hadoop',
-                              conf_dir='/etc/hive/conf.server',
+                              conf_dir='/usr/hdp/current/hive-server2/conf/conf.server',
                               mode=0600,
                               configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                    u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -585,7 +601,7 @@ class TestHiveServer(RMFTestCase):
                               owner='hive',
                               configurations=self.getConfig()['configurations']['hive-site'],
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content=InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner='hive',
                               group='hadoop',
@@ -602,7 +618,7 @@ class TestHiveServer(RMFTestCase):
                               group='root',
                               mode=0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/zkmigrator_jaas.conf',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/zkmigrator_jaas.conf',
                               content = Template('zkmigrator_jaas.conf.j2'),
                               owner = 'hive',
                               group = 'hadoop',
@@ -647,7 +663,7 @@ class TestHiveServer(RMFTestCase):
                               content=Template('startHiveserver2.sh.j2'),
                               mode=0755,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hiveserver2.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hiveserver2.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hiveserver2.properties.j2'),
@@ -655,7 +671,7 @@ class TestHiveServer(RMFTestCase):
     )
     self.assertResourceCalled('XmlConfig', 'hiveserver2-site.xml',
       group = 'hadoop',
-      conf_dir = '/etc/hive/conf.server',
+      conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
       mode = 0600,
       owner = 'hive',
       configuration_attributes = self.getConfig()['configuration_attributes']['hiveserver2-site'],
@@ -664,13 +680,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/webhcat',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0755,
@@ -678,13 +694,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/hcat',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hcat',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0755,
@@ -693,13 +709,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/hive/warehouse',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         group = 'hadoop',
@@ -708,13 +724,13 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/hive',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hive',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0755,
@@ -722,14 +738,14 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/custompath/tmp/hive',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'hive',
         group = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
         mode = 0777,
@@ -737,24 +753,24 @@ class TestHiveServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name='hdfs', default_fs='hdfs://c6401.ambari.apache.org:8020',
-        hadoop_conf_dir = '/etc/hadoop/conf',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
     )
 
   @patch("time.time")
   @patch("socket.socket")
   def test_socket_timeout(self, socket_mock, time_mock):
     s = socket_mock.return_value
-    s.connect = MagicMock()    
+    s.connect = MagicMock()
     s.connect.side_effect = socket.error("")
-    
+
     time_mock.return_value = 1000
-    
+
     try:
       self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server.py",
                            classname = "HiveServer",
@@ -763,7 +779,7 @@ class TestHiveServer(RMFTestCase):
                            stack_version = self.STACK_VERSION,
                            target = RMFTestCase.TARGET_COMMON_SERVICES
       )
-      
+
       self.fail("Script failure due to socket error was expected")
     except:
       self.assert_configure_default()
@@ -790,8 +806,8 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     )
 
     # ensure deregister is called
-    self.assertResourceCalledIgnoreEarlier('Execute', 'hive --config /etc/hive/conf.server --service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
-      path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
+    self.assertResourceCalledIgnoreEarlier('Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
+      path=['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
       tries=1, user='hive')
 
     # ensure stop is called
@@ -823,8 +839,8 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     )
 
     # ensure that deregister is called
-    self.assertResourceCalledIgnoreEarlier( 'Execute', 'hive --config /etc/hive/conf.server --service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
-      path=['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
+    self.assertResourceCalledIgnoreEarlier( 'Execute', 'hive --config /usr/hdp/current/hive-server2/conf/conf.server --service hiveserver2 --deregister 1.2.1.2.3.0.0-2434',
+      path=['/bin:/usr/hdp/current/hive-server2/bin:mock_hadoop_dir'],
       tries=1, user='hive')
 
     # ensure hdp-select is called
@@ -877,7 +893,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
@@ -918,7 +934,7 @@ From source with checksum 150f554beae04f76f814f59549dead8b"""
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = 'mock_hadoop_dir',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
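
The new class-level decorators replace check_stack_feature with MagicMock(side_effect=mock_stack_feature): every call is dispatched through the fake, so ROLLING_UPGRADE and CONFIG_VERSIONING report as supported while all other features stay off. As a standalone sketch of the side_effect technique (os.path.isdir is used purely for illustration, not anything Ambari patches here):

    from unittest.mock import MagicMock, patch
    import os.path

    def fake_isdir(path):
        # Per-argument behaviour, like mock_stack_feature above: only the
        # stack root "exists" under the mock.
        return path.startswith("/usr/hdp")

    # side_effect routes every call through fake_isdir; return_value would
    # instead pin one fixed answer for all calls.
    with patch.object(os.path, "isdir", new=MagicMock(side_effect=fake_isdir)):
        assert os.path.isdir("/usr/hdp/current")
        assert not os.path.isdir("/etc/hadoop")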

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index 66a5e54..7efb5fd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -296,7 +296,7 @@ class TestServiceCheck(RMFTestCase):
 
     self.assertResourceCalled('Execute', "env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hcatSmoke.sh hcatsmoke prepare true",
         logoutput = True,
-        path = ['/usr/sbin','/usr/local/bin','/bin','/usr/bin', '/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/2.3.0.0-1234/hive/bin'],
+        path = ['/usr/sbin','/usr/local/bin','/bin','/usr/bin', '/bin:/usr/hdp/2.3.0.0-1234/hadoop/bin:/usr/hdp/2.3.0.0-1234/hive/bin'],
         tries = 3,
         user = 'ambari-qa',
         try_sleep = 5)
@@ -340,7 +340,7 @@ class TestServiceCheck(RMFTestCase):
     # LLAP call
     self.assertResourceCalled('Execute',
       "! beeline -u 'jdbc:hive2://c6402.ambari.apache.org:10500/;transportMode=binary' --hiveconf \"hiveLlapServiceCheck=\" -f /usr/hdp/current/hive-server2-hive2/scripts/llap/sql/serviceCheckScript.sql -e '' 2>&1| awk '{print}'|grep -i -e 'Invalid status\|Invalid URL\|command not found\|Connection refused'",
-      path = ['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', '/bin:/usr/hdp/current/hadoop-client/bin:/usr/hdp/2.3.0.0-1234/hive2/bin'],
+      path = ['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin', '/bin:/usr/hdp/2.3.0.0-1234/hadoop/bin:/usr/hdp/2.3.0.0-1234/hive2/bin'],
       tries = 1,
       stderr = -1,
       wait_for_finish = True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index 943c201..f9480ee 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -21,10 +21,22 @@ import json
 from mock.mock import MagicMock, patch
 from stacks.utils.RMFTestCase import *
 from resource_management.core.exceptions import Fail
+from resource_management.libraries.functions import StackFeature
 
+# used for faking out stack features when the config files used by unit tests use older stacks
+def mock_stack_feature(stack_feature, stack_version):
+  if stack_feature == StackFeature.ROLLING_UPGRADE:
+    return True
+  if stack_feature == StackFeature.CONFIG_VERSIONING:
+    return True
+  if stack_feature == StackFeature.HIVE_WEBHCAT_SPECIFIC_CONFIGS:
+    return True
+
+  return False
 
 @patch("os.path.isfile", new = MagicMock(return_value=True))
 @patch("glob.glob", new = MagicMock(return_value=["one", "two"]))
+@patch("resource_management.libraries.functions.stack_features.check_stack_feature", new=MagicMock(side_effect=mock_stack_feature))
 class TestWebHCatServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -136,7 +148,7 @@ class TestWebHCatServer(RMFTestCase):
 
     self.assert_configure_secured()
     self.assertResourceCalled('Execute', 'cd /var/run/webhcat ; /usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh start',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client'},
+        environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop'},
         not_if = "ls /var/run/webhcat/webhcat.pid >/dev/null 2>&1 && ps -p `cat /var/run/webhcat/webhcat.pid` >/dev/null 2>&1",
         user = 'hcat',
     )
@@ -153,7 +165,7 @@ class TestWebHCatServer(RMFTestCase):
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/hive-webhcat/sbin/webhcat_server.sh stop',
                               user = 'hcat',
-                              environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client' }
+                              environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop' }
                               )
 
     self.assertResourceCalled('Execute', 'ambari-sudo.sh kill -9 `cat /var/run/webhcat/webhcat.pid`',
@@ -207,7 +219,7 @@ class TestWebHCatServer(RMFTestCase):
                               create_parents = True,
                               mode = 0755,
                               )
-    self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/etc/webhcat',
                               owner = 'hcat',
                               group = 'hadoop',
                               create_parents = True,
@@ -216,20 +228,20 @@ class TestWebHCatServer(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive-webhcat/conf',
+                              conf_dir = '/usr/hdp/current/hive-webhcat/etc/webhcat',
                               configurations = self.getConfig()['configurations']['webhcat-site'],
                               configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
     )
-    self.assertResourceCalled('File', '/etc/hive-webhcat/conf/webhcat-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-webhcat/etc/webhcat/webhcat-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
                               owner = 'hcat',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/conf',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/etc/webhcat',
         cd_access = 'a',
         create_parents = True
     )
-    self.assertResourceCalled('File', '/etc/hive-webhcat/conf/webhcat-log4j.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-webhcat/etc/webhcat/webhcat-log4j.properties',
                               content = InlineTemplate('log4jproperties\nline2'),
                               owner = 'hcat',
                               group = 'hadoop',
@@ -249,7 +261,7 @@ class TestWebHCatServer(RMFTestCase):
                               create_parents = True,
                               mode = 0755,
                               )
-    self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/etc/webhcat',
                               owner = 'hcat',
                               group = 'hadoop',
                               create_parents = True,
@@ -258,32 +270,35 @@ class TestWebHCatServer(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'webhcat-site.xml',
                               owner = 'hcat',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive-webhcat/conf',
+                              conf_dir = '/usr/hdp/current/hive-webhcat/etc/webhcat',
                               configurations = self.getConfig()['configurations']['webhcat-site'],
                               configuration_attributes = self.getConfig()['configuration_attributes']['webhcat-site']
     )
-    self.assertResourceCalled('File', '/etc/hive-webhcat/conf/webhcat-env.sh',
+
+    self.assertResourceCalledIgnoreEarlier('File', '/usr/hdp/current/hive-webhcat/etc/webhcat/webhcat-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['webhcat-env']['content']),
                               owner = 'hcat',
                               group = 'hadoop',
                               )
-    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/conf',
+    self.assertResourceCalled('Directory', '/usr/hdp/current/hive-webhcat/etc/webhcat',
         cd_access = 'a',
         create_parents = True
     )
-    self.assertResourceCalled('File', '/etc/hive-webhcat/conf/webhcat-log4j.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-webhcat/etc/webhcat/webhcat-log4j.properties',
                               content = InlineTemplate('log4jproperties\nline2'),
                               owner = 'hcat',
                               group = 'hadoop',
                               mode = 0644,
                               )
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_pre_upgrade_restart(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
       json_content = json.load(f)
     version = '2.2.1.0-3242'
     json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_version'] = "2.3"
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
                        command = "pre_upgrade_restart",
@@ -296,6 +311,7 @@ class TestWebHCatServer(RMFTestCase):
     self.assertNoMoreResources()
 
   @patch("resource_management.core.shell.call")
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_pre_upgrade_restart_23(self, call_mock):
     import sys
 
@@ -326,6 +342,7 @@ class TestWebHCatServer(RMFTestCase):
 
 
   @patch("resource_management.core.shell.call")
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_rolling_restart_configure(self, call_mock):
     import sys
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index a5dac6c..4d94723 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -1177,6 +1177,7 @@ class TestOozieServer(RMFTestCase):
   @patch("shutil.rmtree", new = MagicMock())
   @patch("glob.iglob")
   @patch("shutil.copy2", new = MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_upgrade(self, glob_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock):
 
@@ -1228,6 +1229,7 @@ class TestOozieServer(RMFTestCase):
   @patch("shutil.rmtree", new = MagicMock())
   @patch("glob.iglob")
   @patch("shutil.copy2", new = MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_upgrade_23(self, glob_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock):
 
@@ -1252,6 +1254,7 @@ class TestOozieServer(RMFTestCase):
 
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_version'] = "2.3"
 
     mocks_dict = {}
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server.py",
@@ -1287,6 +1290,7 @@ class TestOozieServer(RMFTestCase):
   @patch("shutil.rmtree", new = MagicMock())
   @patch("glob.iglob")
   @patch("shutil.copy2", new = MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value=True))
   def test_upgrade_23_with_type(self, glob_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock):
 
@@ -1311,6 +1315,7 @@ class TestOozieServer(RMFTestCase):
 
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_version'] = "2.3"
     json_content['upgradeSummary'] = {
       'services': { 'OOZIE': { 'sourceStackId': 'HDP-2.3' }},
       'direction': 'UPGRADE',
@@ -1353,6 +1358,7 @@ class TestOozieServer(RMFTestCase):
   @patch("os.remove")
   @patch("shutil.rmtree", new = MagicMock())
   @patch("shutil.copy2", new = MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_downgrade_no_compression_library_copy(self, remove_mock,
       isfile_mock, exists_mock, isdir_mock):
 
@@ -1397,6 +1403,7 @@ class TestOozieServer(RMFTestCase):
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
     json_content['hostLevelParams']['stack_name'] = "HDP"
+    json_content['hostLevelParams']['stack_version'] = "2.3"
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/oozie_server_upgrade.py",
       classname = "OozieUpgrade", command = "upgrade_oozie_database_and_sharelib",
@@ -1459,6 +1466,7 @@ class TestOozieServer(RMFTestCase):
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
     json_content['hostLevelParams']['stack_name'] = "HDP"
+    json_content['hostLevelParams']['stack_version'] = "2.3"
 
     # use mysql external database
     json_content['configurations']['oozie-site']['oozie.service.JPAService.jdbc.driver'] = "com.mysql.jdbc.Driver"
@@ -1525,6 +1533,7 @@ class TestOozieServer(RMFTestCase):
   @patch("shutil.rmtree", new = MagicMock())
   @patch("glob.iglob")
   @patch("shutil.copy2", new = MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_upgrade_23_ensure_falcon_copied(self, glob_mock, remove_mock,
       isfile_mock, exists_mock, isdir_mock):
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
index 468f602..4edaacc 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_service_check.py
@@ -30,6 +30,7 @@ class TestOozieServiceCheck(RMFTestCase):
 
   @patch("resource_management.core.shell.call")
   @patch("glob.glob")
+  @patch("resource_management.libraries.functions.stack_select.get_hadoop_dir", new = MagicMock(return_value = "/usr/hdp/current/hadoop-client"))
   def test_service_check(self, glob_mock, call_mock):
     glob_mock.return_value = ["examples-dir", "b"]
 
@@ -39,7 +40,7 @@ class TestOozieServiceCheck(RMFTestCase):
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
     json_content['hostLevelParams']['stack_name'] = 'HDP'
-    json_content['hostLevelParams']['stack_version'] = '2.2'
+    json_content['hostLevelParams']['stack_version'] = '2.3'
     json_content['configurations']['oozie-env']['service_check_job_name'] = 'map-reduce'
 
     mocks_dict = {}
@@ -65,7 +66,7 @@ class TestOozieServiceCheck(RMFTestCase):
       mode = 0755)
 
     self.assertResourceCalled('Execute',
-      ('/tmp/prepareOozieHdfsDirectories.sh', '/usr/hdp/current/oozie-client/conf', 'examples-dir', '/usr/hdp/current/hadoop-client/conf', 'c6402.ambari.apache.org:8050', 'hdfs://c6401.ambari.apache.org:8020', 'default', 'map-reduce'),
+      ('/tmp/prepareOozieHdfsDirectories.sh', '/usr/hdp/current/oozie-client/conf', 'examples-dir', '/usr/hdp/2.3.0.0-1234/hadoop/conf', 'c6402.ambari.apache.org:8050', 'hdfs://c6401.ambari.apache.org:8020', 'default', 'map-reduce'),
       tries = 3,
       try_sleep = 5,
       logoutput = True)

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 6a99dfa..6e5c1a1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -755,6 +755,7 @@ class TestHistoryServer(RMFTestCase):
 
   @patch.object(functions, "get_stack_version", new = MagicMock(return_value="2.3.0.0-1234"))
   @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_pre_upgrade_restart_23(self, copy_to_hdfs_mock):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/default.json"
     with open(config_file, "r") as f:
@@ -783,13 +784,13 @@ class TestHistoryServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.3.0.0-1234/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.3.0.0-1234/hadoop/conf',
     )
 
     self.assertNoMoreResources()
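
The expectation changes here encode the point of AMBARI-22083: after a MAINT/PATCH upgrade the hadoop bin and conf directories must come from the concrete stack version (/usr/hdp/2.3.0.0-1234/hadoop/...) rather than the hadoop-client "current" symlink. A hypothetical helper showing the shape of that resolution; the real logic lives behind stack_select.get_hadoop_dir, not this function:

    # Illustration only, not Ambari's implementation.
    def versioned_hadoop_dir(stack_root, version, subdir):
        return "{0}/{1}/hadoop/{2}".format(stack_root, version, subdir)

    versioned_hadoop_dir("/usr/hdp", "2.3.0.0-1234", "bin")
    # -> "/usr/hdp/2.3.0.0-1234/hadoop/bin"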

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index 75eff39..43241a4 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -421,6 +421,7 @@ class TestMapReduce2Client(RMFTestCase):
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_stack_upgrade_save_new_config(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/client-upgrade.json"
     with open(config_file, "r") as f:

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index f905cf9..3d2d4d3 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -95,10 +95,10 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
       group = 'hadoop',
-      conf_dir = "/usr/hdp/current/hadoop-client/conf",
+      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
       configurations = self.getConfig()['configurations']['core-site'],
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
-      only_if="ls /usr/hdp/current/hadoop-client/conf")
+      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf")
 
     self.assertResourceCalled('Directory',
                               '/etc/ambari-logsearch-logfeeder/conf',
@@ -169,10 +169,10 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
       group = 'hadoop',
-      conf_dir = "/usr/hdp/current/hadoop-client/conf",
+      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
       configurations = self.getConfig()['configurations']['core-site'],
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
-      only_if="ls /usr/hdp/current/hadoop-client/conf")
+      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf")
 
     self.assertResourceCalled('Directory',
                               '/etc/ambari-logsearch-logfeeder/conf',
@@ -276,10 +276,10 @@ class TestHookAfterInstall(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
       owner = 'hdfs',
       group = 'hadoop',
-      conf_dir = "/usr/hdp/current/hadoop-client/conf",
+      conf_dir = "/usr/hdp/2.3.0.0-1234/hadoop/conf",
       configurations = self.getConfig()['configurations']['core-site'],
       configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
-      only_if="ls /usr/hdp/current/hadoop-client/conf")
+      only_if="ls /usr/hdp/2.3.0.0-1234/hadoop/conf")
 
     self.assertResourceCalled('Directory',
                               '/etc/ambari-logsearch-logfeeder/conf',
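
The expectations rewritten in this commit all follow the same substitution: once a concrete stack version is in play, the tests stop expecting the /usr/hdp/current/hadoop-client symlink paths and instead expect either version-qualified directories (as above) or, in later hunks, the plain /etc/hadoop/conf directory. The version-qualified layout, sketched in Python with the version string taken from the test fixtures:

# Version-qualified Hadoop layout the updated assertions expect; the
# concrete version comes from each test's JSON config fixture.
version = "2.3.0.0-1234"
hadoop_home = "/usr/hdp/%s/hadoop" % version
hadoop_bin_dir = hadoop_home + "/bin"    # was /usr/hdp/current/hadoop-client/bin
hadoop_conf_dir = hadoop_home + "/conf"  # was /usr/hdp/current/hadoop-client/conf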

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
index 363458a..73828e8 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-ANY/test_before_any.py
@@ -182,15 +182,6 @@ class TestHookBeforeInstall(RMFTestCase):
     self.assertResourceCalled('Directory', '/etc/hadoop',
                               mode = 0755,
                               )
-    self.assertResourceCalled('Directory', '/etc/hadoop/conf.empty',
-                              owner = 'root',
-                              create_parents = True,
-                              group = 'hadoop',
-                              )
-    self.assertResourceCalled('Link', '/etc/hadoop/conf',
-                              not_if = 'ls /etc/hadoop/conf',
-                              to = '/etc/hadoop/conf.empty',
-                              )
     self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['hadoop-env']['content']),
                               owner = 'hdfs',

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
index 239e2b5..7f2ed46 100644
--- a/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
+++ b/ambari-server/src/test/python/stacks/2.1/FALCON/test_falcon_server.py
@@ -236,8 +236,8 @@ class TestFalconServer(RMFTestCase):
 
     self.assertResourceCalled('Execute',
       '/usr/hdp/current/falcon-server/bin/falcon-stop',
-      path = ['/usr/hdp/current/hadoop-client/bin'], user='falcon',
-      environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client'})
+      path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'], user='falcon',
+      environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'})
 
     self.assertResourceCalled('File', '/var/run/falcon/falcon.pid',
       action = ['delete'])
@@ -342,7 +342,7 @@ class TestFalconServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/falcon',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2135/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
@@ -351,7 +351,7 @@ class TestFalconServer(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         owner = 'falcon',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0777,
@@ -359,7 +359,7 @@ class TestFalconServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/apps/data-mirroring',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2135/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         source = '/usr/hdp/current/falcon-server/data-mirroring',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -372,7 +372,7 @@ class TestFalconServer(RMFTestCase):
         recursive_chown = True,
         owner = 'falcon',
         group = 'users',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0770,
@@ -380,7 +380,7 @@ class TestFalconServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2135/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
@@ -389,7 +389,7 @@ class TestFalconServer(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Directory', '/hadoop/falcon',
         owner = 'falcon',
@@ -404,17 +404,17 @@ class TestFalconServer(RMFTestCase):
         owner = 'falcon',
         create_parents = True,
     )
-   
+
     self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-config.sh server falcon',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client'},
-        path = ['/usr/hdp/current/hadoop-client/bin'],
+        environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'},
+        path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'],
         user = 'falcon',
         not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
     )
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/falcon-server/bin/falcon-start -port 15000',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client'},
-        path = ['/usr/hdp/current/hadoop-client/bin'],
+        environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2135/hadoop'},
+        path = ['/usr/hdp/2.2.1.0-2135/hadoop/bin'],
         user = 'falcon',
         not_if = 'ls /var/run/falcon/falcon.pid && ps -p ',
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
index 98a46d0..db4e2a1 100644
--- a/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.1/HIVE/test_hive_metastore.py
@@ -23,11 +23,22 @@ import os
 
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
-from resource_management.libraries.functions.constants import Direction
-from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions.constants import Direction, StackFeature
+
+# used for faking out stack features when the config files used by unit tests use older stacks
+def mock_stack_feature(stack_feature, stack_version):
+  if stack_feature == StackFeature.ROLLING_UPGRADE:
+    return True
+  if stack_feature == StackFeature.CONFIG_VERSIONING:
+    return True
+  if stack_feature == StackFeature.HIVE_METASTORE_UPGRADE_SCHEMA:
+    return True
+
+  return False
 
 @patch("platform.linux_distribution", new = MagicMock(return_value="Linux"))
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0,'123','')))
+@patch("resource_management.libraries.functions.stack_features.check_stack_feature", new=MagicMock(side_effect=mock_stack_feature))
 class TestHiveMetastore(RMFTestCase):
 
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
@@ -58,13 +69,13 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_default()
     self.assert_init_schema('aaa')
 
-    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2067/hadoop',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/2.2.1.0-2067/hadoop/bin'],
     )
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive aaa com.mysql.jdbc.Driver',
@@ -89,13 +100,13 @@ class TestHiveMetastore(RMFTestCase):
     self.assert_configure_default()
     self.assert_init_schema('aaa')
 
-    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': '/usr/hdp/2.2.1.0-2067/hadoop',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/2.2.1.0-2067/hadoop/bin'],
     )
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive aaa com.mysql.jdbc.Driver',
                               path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
@@ -152,13 +163,13 @@ class TestHiveMetastore(RMFTestCase):
 
     self.assert_configure_secured()
     self.assert_init_schema('asd')
-    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
         environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
     )
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive asd com.mysql.jdbc.Driver',
@@ -223,7 +234,7 @@ class TestHiveMetastore(RMFTestCase):
                               )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive/conf.server',
+                              conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
                               mode = 0600,
                               configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                      u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -231,7 +242,7 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner = 'hive',
                               group = 'hadoop',
@@ -283,7 +294,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
@@ -295,8 +306,8 @@ class TestHiveMetastore(RMFTestCase):
                               )
 
   def assert_init_schema(self, password):
-    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord {password} -verbose'.format(password = password),
-        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord {password} -verbose'".format(password = password),
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord {password} -verbose'.format(password = password),
+        not_if = "ambari-sudo.sh su hive -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord {password} -verbose'".format(password = password),
         user = 'hive',
     )
 
@@ -331,7 +342,7 @@ class TestHiveMetastore(RMFTestCase):
                               )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive/conf.server',
+                              conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
                               mode = 0600,
                               configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                      u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -339,7 +350,7 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner = 'hive',
                               group = 'hadoop',
@@ -356,7 +367,7 @@ class TestHiveMetastore(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/zkmigrator_jaas.conf',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/zkmigrator_jaas.conf',
                               content = Template('zkmigrator_jaas.conf.j2'),
                               owner = 'hive',
                               group = 'hadoop',
@@ -396,7 +407,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
@@ -449,7 +460,7 @@ class TestHiveMetastore(RMFTestCase):
                               ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hive-metastore', version), sudo=True,)
     self.assertNoMoreResources()
 
-
+  @patch("resource_management.libraries.functions.stack_select.get_hadoop_dir", new = MagicMock(return_value="/usr/hdp/current/hadoop-client"))
   def test_pre_upgrade_restart_ims(self):
     """
     Tests the state of the init_metastore_schema property on update
@@ -576,7 +587,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalledIgnoreEarlier('Directory', '/var/lib/hive', owner = 'hive', group = 'hadoop',
       mode = 0755, create_parents = True, cd_access = 'a')
 
-    self.assertResourceCalled('Execute', ('rm', '-f', '/usr/hdp/current/hive-metastore/lib/ojdbc6.jar'),
+    self.assertResourceCalledIgnoreEarlier('Execute', ('rm', '-f', '/usr/hdp/current/hive-metastore/lib/ojdbc6.jar'),
         path = ['/bin', '/usr/bin/'],
         sudo = True)
 
@@ -639,6 +650,7 @@ class TestHiveMetastore(RMFTestCase):
   @patch("os.path.exists")
   @patch("resource_management.core.shell.call")
   @patch("resource_management.libraries.functions.get_stack_version")
+  @patch("resource_management.libraries.functions.stack_select.get_hadoop_dir", new = MagicMock(return_value = "/usr/hdp/current/hadoop-client"))
   def test_upgrade_metastore_schema_using_new_db(self, get_stack_version_mock, call_mock, os_path_exists_mock):
     get_stack_version_mock.return_value = '2.3.0.0-1234'
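
The mock_stack_feature helper added at the top of this diff leans on side_effect rather than return_value: mock forwards the original call arguments to the supplied function, so individual stack features can be toggled per test module. The same idiom in a self-contained sketch (feature names illustrative):

import unittest
from unittest.mock import MagicMock, patch

def check_stack_feature(stack_feature, stack_version):
    raise AssertionError("the real implementation must not run in unit tests")

def fake_stack_feature(stack_feature, stack_version):
    # Enable only the features this test module depends on.
    return stack_feature in ("rolling_upgrade", "config_versioning")

class SideEffectSketch(unittest.TestCase):
    @patch(__name__ + ".check_stack_feature",
           new=MagicMock(side_effect=fake_stack_feature))
    def test_selected_features_only(self):
        self.assertTrue(check_stack_feature("rolling_upgrade", "2.2"))
        self.assertFalse(check_stack_feature("pig_on_tez", "2.2"))

if __name__ == "__main__":
    unittest.main()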
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
index d3508e1..a337b0f 100644
--- a/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.2/PIG/test_pig_service_check.py
@@ -20,7 +20,20 @@ limitations under the License.
 from mock.mock import patch, MagicMock
 
 from stacks.utils.RMFTestCase import *
+from resource_management.libraries.functions.constants import StackFeature
 
+# used for faking out stack features when the config files used by unit tests use older stacks
+def mock_stack_feature(stack_feature, stack_version):
+  if stack_feature == StackFeature.ROLLING_UPGRADE:
+    return True
+  if stack_feature == StackFeature.CONFIG_VERSIONING:
+    return True
+  if stack_feature == StackFeature.PIG_ON_TEZ:
+    return True
+
+  return False
+
+@patch("resource_management.libraries.functions.stack_features.check_stack_feature", new=MagicMock(side_effect=mock_stack_feature))
 class TestPigServiceCheck(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "PIG/0.12.0.2.0/package"
   STACK_VERSION = "2.2"

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
index 40c9d47..922ace2 100644
--- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
+++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py
@@ -59,7 +59,7 @@ class TestJobHistoryServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = {u'a': u'b'},
@@ -68,7 +68,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/start-history-server.sh',
         environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
@@ -126,7 +126,7 @@ class TestJobHistoryServer(RMFTestCase):
         hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         default_fs= UnknownConfigurationMock(),
         hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
-        hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir='/etc/hadoop/conf',
         hdfs_site=UnknownConfigurationMock(),
         keytab=UnknownConfigurationMock(),
         kinit_path_local='/usr/bin/kinit',
@@ -176,7 +176,7 @@ class TestJobHistoryServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/spark',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = {u'a': u'b'},
@@ -185,7 +185,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         owner = 'spark',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0775,
@@ -193,7 +193,7 @@ class TestJobHistoryServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = {u'a': u'b'},
@@ -202,7 +202,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         owner = 'spark',
@@ -265,7 +265,7 @@ class TestJobHistoryServer(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = UnknownConfigurationMock(),
         owner = 'spark',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         dfs_type = '',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
@@ -283,7 +283,7 @@ class TestJobHistoryServer(RMFTestCase):
         user = UnknownConfigurationMock(),
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         owner = 'spark',

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
index 96a2e9f..e136fa9 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
@@ -50,7 +50,7 @@ class TestMahoutClient(RMFTestCase):
       owner = "yarn",
       group = 'hadoop',
       mode = 0644,
-      conf_dir = '/usr/hdp/current/hadoop-client/conf',
+      conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
       configurations = self.getConfig()['configurations']['yarn-site'],
       configuration_attributes = self.getConfig()['configuration_attributes']['yarn-site']
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
index 7bda992..b80476c 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
@@ -45,7 +45,7 @@ class TestMahoutClient(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
                               keytab = UnknownConfigurationMock(),
                               kinit_path_local = '/usr/bin/kinit',
                               user = 'hdfs',
@@ -53,64 +53,64 @@ class TestMahoutClient(RMFTestCase):
                               mode = 0770,
                               owner = 'ambari-qa',
                               action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
-                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
                               type = 'directory',
                               )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeoutput',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         action = ['delete_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
         type = 'directory',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         owner = 'ambari-qa',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput/sample-mahout-test.txt',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         source = '/tmp/sample-mahout-test.txt',
         user = 'hdfs',
         dfs_type = '',
         owner = 'ambari-qa',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
         type = 'file',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
     )
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore', hdfs_site=self.getConfig()['configurations']['hdfs-site'], principal_name=UnknownConfigurationMock(), default_fs='hdfs://c6401.ambari.apache.org:8020',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
     )
     self.assertResourceCalled('Execute', 'mahout seqdirectory --input /user/ambari-qa/mahoutsmokeinput/'
                                          'sample-mahout-test.txt --output /user/ambari-qa/mahoutsmokeoutput/ '
                                          '--charset utf-8',
-                              environment = {'HADOOP_CONF_DIR': '/usr/hdp/current/hadoop-client/conf',
-                                             'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+                              environment = {'HADOOP_CONF_DIR': '/usr/hdp/2.2.1.0-2067/hadoop/conf',
+                                             'HADOOP_HOME': '/usr/hdp/2.2.1.0-2067/hadoop',
                                              'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45',
                                              'MAHOUT_HOME': '/usr/hdp/current/mahout-client'},
                               path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
@@ -121,9 +121,9 @@ class TestMahoutClient(RMFTestCase):
     self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /user/ambari-qa/mahoutsmokeoutput/_SUCCESS',
                               try_sleep = 6,
                               tries = 10,
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
                               user = 'ambari-qa',
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
                               )
     self.assertNoMoreResources()
 


[26/50] [abbrv] ambari git commit: AMBARI-22084. Upgrade Wizard Expand Details Does Not Work (alexantonenko)

Posted by rl...@apache.org.
AMBARI-22084. Upgrade Wizard Expand Details Does Not Work (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f744a36d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f744a36d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f744a36d

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: f744a36d378765f50d4ab808065604f64c7ffb1d
Parents: 8e0f782
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Thu Sep 28 21:23:14 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Thu Sep 28 21:23:14 2017 +0300

----------------------------------------------------------------------
 .../main/admin/stack_upgrade/upgrade_wizard_view.js   | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f744a36d/ambari-web/app/views/main/admin/stack_upgrade/upgrade_wizard_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_wizard_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_wizard_view.js
index 415d87d..89c54ce 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_wizard_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_wizard_view.js
@@ -394,22 +394,30 @@ App.upgradeWizardView = Em.View.extend({
   },
 
   /**
+   * previous item request
+   */
+  prevItemRequest: null,
+
+  /**
    * poll for tasks when item is expanded
    */
   doUpgradeItemPolling: function () {
     var self = this;
     var item = this.get('runningItem') || this.get('failedItem');
-
+    var request = this.get('prevItemRequest');
+    if ( request ) request.abort();
     if (item && this.get('isDetailsOpened')) {
-      this.get('controller').getUpgradeItem(item).complete(function () {
+      request = this.get('controller').getUpgradeItem(item).complete(function () {
         self.set('upgradeItemTimer', setTimeout(function () {
           self.doUpgradeItemPolling();
         }, App.bgOperationsUpdateInterval));
       });
+
+      this.set('prevItemRequest', request);
     } else {
       clearTimeout(this.get('upgradeItemTimer'));
     }
-  }.observes('isDetailsOpened'),
+  }.observes('isDetailsOpened', 'runningItem', 'failedItem'),
 
   /**
    * set current upgrade item state to FAILED (for HOLDING_FAILED) or TIMED_OUT (for HOLDING_TIMED_OUT)


[33/50] [abbrv] ambari git commit: AMBARI-22090. INCORROUT in Hive SELECT-JOIN query using Hive View 2.0 (pallavkul)

Posted by rl...@apache.org.
AMBARI-22090. INCORROUT in Hive SELECT-JOIN query using Hive View 2.0 (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/045fc072
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/045fc072
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/045fc072

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 045fc072588632429bc53f6b6a1ee0e0a83179d0
Parents: d3b67ee
Author: pallavkul <pa...@gmail.com>
Authored: Fri Sep 29 20:51:41 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Fri Sep 29 20:51:41 2017 +0530

----------------------------------------------------------------------
 .../src/main/resources/ui/app/components/query-result-table.js     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/045fc072/contrib/views/hive20/src/main/resources/ui/app/components/query-result-table.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/query-result-table.js b/contrib/views/hive20/src/main/resources/ui/app/components/query-result-table.js
index be5c43a..48a0c00 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/query-result-table.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/query-result-table.js
@@ -44,7 +44,7 @@ export default Ember.Component.extend({
       tempColumn['label'] = column[0];
 
       let localValuePath = column[0];
-      tempColumn['valuePath'] = localValuePath.substring(localValuePath.lastIndexOf('.') +1 , localValuePath.length);
+      tempColumn['valuePath'] = localValuePath.split(".").join('');
 
       columnArr.push(tempColumn);
     });
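
A plausible reading of the defect: in a SELECT-JOIN result the column labels arrive table-qualified (for example t1.id and t2.id), and keeping only the text after the last dot collapses both to the same valuePath, so one column's data overwrites the other's. Removing the dots instead keeps the paths distinct. Illustrated in Python with hypothetical column names:

cols = ["t1.id", "t2.id"]

# old behaviour: substring after the last '.', both collapse to 'id'
old_paths = [c[c.rfind(".") + 1:] for c in cols]   # ['id', 'id']

# new behaviour: strip the dots, paths stay unique
new_paths = [c.replace(".", "") for c in cols]     # ['t1id', 't2id']

assert len(set(old_paths)) == 1 and len(set(new_paths)) == 2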


[12/50] [abbrv] ambari git commit: AMBARI-22066. Update unit tests to test recursive variable replacements using VariableReplacementHelper (rlevas)

Posted by rl...@apache.org.
AMBARI-22066. Update unit tests to test recursive variable replacements using VariableReplacementHelper (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b027837f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b027837f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b027837f

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: b027837fab2f6552c64aadb41ffb5667a2acdd26
Parents: 3e1a5cb
Author: Robert Levas <rl...@hortonworks.com>
Authored: Wed Sep 27 08:05:45 2017 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Wed Sep 27 08:05:45 2017 -0400

----------------------------------------------------------------------
 .../kerberos/VariableReplacementHelperTest.java | 35 ++++++++++++++++++--
 1 file changed, 33 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b027837f/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java
index e46294a..d724f03 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/VariableReplacementHelperTest.java
@@ -33,9 +33,9 @@ import org.junit.experimental.categories.Category;
 
 import junit.framework.Assert;
 
-@Category({ category.KerberosTest.class})
+@Category({category.KerberosTest.class})
 public class VariableReplacementHelperTest {
-  VariableReplacementHelper helper = new VariableReplacementHelper();
+  private VariableReplacementHelper helper = new VariableReplacementHelper();
 
   @Test
   public void testReplaceVariables() throws AmbariException {
@@ -136,6 +136,37 @@ public class VariableReplacementHelperTest {
       // This is expected...
     }
   }
+  @Test
+  public void testReplaceVariablesRecursive() throws AmbariException {
+    Map<String, Map<String, String>> configurations = new HashMap<String, Map<String, String>>() {
+      {
+        put("", new HashMap<String, String>());
+
+        put("data", new HashMap<String, String>() {{
+          put("data_host1.example.com", "host 1 data");
+          put("data_host2.example.com", "host 2 data");
+          put("data_host3.example.com", "host 3 data");
+        }});
+      }
+    };
+
+    configurations.get("").put("h", "host");
+
+    // Shows ${h} was replaced
+    assertEquals("${data/data_${host}}", helper.replaceVariables("${data/data_${${h}}}", configurations));
+
+    // data_host.example.com does not exist in the data configuration
+    configurations.get("").put("host", "host.example.com");
+
+    // Shows ${host} was replaced
+    assertEquals("${data/data_host.example.com}", helper.replaceVariables("${data/data_${${h}}}", configurations));
+
+
+    for (int i = 1; i <= 3; i++) {
+      configurations.get("").put("host", String.format("host%d.example.com", i));
+      assertEquals(String.format("host %d data", i), helper.replaceVariables("${data/data_${${h}}}", configurations));
+    }
+  }
 
   @Test
   public void testReplaceComplicatedVariables() throws AmbariException {
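
The new test exercises nested resolution: ${h} resolves first, then ${host}, and only then the data/ namespace lookup; variables that cannot be resolved are left in place, which is why the intermediate expectations keep their ${...} wrappers. A behavioral sketch in Python, consistent with the assertions above (this is not the Java implementation):

import re

INNERMOST = re.compile(r"\$\{([^${}]+)\}")  # innermost variables only

def replace_variables(value, configurations):
    def lookup(match):
        namespace, _, key = match.group(1).rpartition("/")
        replacement = configurations.get(namespace, {}).get(key)
        # Unresolved variables stay as-is, mirroring the expectations.
        return match.group(0) if replacement is None else replacement

    while True:
        replaced = INNERMOST.sub(lookup, value)
        if replaced == value:
            return replaced
        value = replaced

configurations = {
    "": {"h": "host", "host": "host1.example.com"},
    "data": {"data_host1.example.com": "host 1 data"},
}
assert replace_variables("${data/data_${${h}}}", configurations) == "host 1 data"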


[19/50] [abbrv] ambari git commit: AMBARI-22073 UI: VersionTag is no longer needed for config creation/update. (atkach)

Posted by rl...@apache.org.
AMBARI-22073 UI: VersionTag is no longer needed for config creation/update. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/add33d4f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/add33d4f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/add33d4f

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: add33d4f1905b91b21db4b4af1b857374fbac5f8
Parents: 9b25914
Author: Andrii Tkach <at...@apache.org>
Authored: Wed Sep 27 16:54:49 2017 +0300
Committer: Andrii Tkach <at...@apache.org>
Committed: Thu Sep 28 13:49:27 2017 +0300

----------------------------------------------------------------------
 .../journalNode/progress_controller.js          |  4 +--
 .../highAvailability/progress_controller.js     |  4 +--
 .../main/admin/kerberos/step2_controller.js     |  7 +++--
 ambari-web/app/controllers/main/host/details.js |  2 --
 .../main/service/reassign/step4_controller.js   |  2 --
 .../app/controllers/wizard/step8_controller.js  | 15 ++---------
 .../app/mixins/common/configs/configs_saver.js  | 26 +++---------------
 .../main/service/configs/config_overridable.js  |  1 -
 ambari-web/app/utils/ajax/ajax.js               |  2 --
 .../journalNode/progress_controller_test.js     |  2 --
 .../progress_controller_test.js                 |  3 ---
 .../admin/kerberos/step2_controller_test.js     |  9 +++----
 .../mixins/common/configs/configs_saver_test.js | 28 ++++----------------
 13 files changed, 18 insertions(+), 87 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/app/controllers/main/admin/highAvailability/journalNode/progress_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/journalNode/progress_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/journalNode/progress_controller.js
index 2959d67..d13e848 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/journalNode/progress_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/journalNode/progress_controller.js
@@ -26,19 +26,17 @@ App.ManageJournalNodeProgressPageController = App.ManageJournalNodeWizardControl
   
   /**
    * Prepare object to send to the server to save configs
-   * Split all configs by site names and tag and note
+   * Split all configs by site names and note
    * @param siteNames Array
    * @param data Object
    * @param note String
    */
   reconfigureSites: function(siteNames, data, note) {
-    var tagName = 'version' + App.dateTime();
 
     return siteNames.map(function(_siteName) {
       var config = data.items.findProperty('type', _siteName);
       var configToSave = {
         type: _siteName,
-        tag: tagName,
         properties: config && config.properties,
         service_config_version_note: note || ''
       };

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/app/controllers/main/admin/highAvailability/progress_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/progress_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/progress_controller.js
index 9039b9d..b2d19dd 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/progress_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/progress_controller.js
@@ -67,18 +67,16 @@ App.HighAvailabilityProgressPageController = App.HighAvailabilityWizardControlle
 
   /**
    * Prepare object to send to the server to save configs
-   * Split all configs by site names and tag and note
+   * Split all configs by site names and note
    * @param siteNames Array
    * @param data Object
    * @param note String
    */
   reconfigureSites: function(siteNames, data, note) {
-    var tagName = App.get('testMode') ? 'version1' : 'version' + (new Date).getTime();
     return siteNames.map(function(_siteName) {
       var config = data.items.findProperty('type', _siteName);
       var configToSave = {
         type: _siteName,
-        tag: tagName,
         properties: config && config.properties,
         service_config_version_note: note || ''
       };

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js b/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
index 258a384..05b0b31 100644
--- a/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
+++ b/ambari-web/app/controllers/main/admin/kerberos/step2_controller.js
@@ -199,13 +199,12 @@ App.KerberosWizardStep2Controller = App.WizardStep7Controller.extend(App.KDCCred
   createConfigurations: function () {
     var service = App.StackService.find().findProperty('serviceName', 'KERBEROS'),
         serviceConfigTags = [],
-        tag = 'version' + (new Date).getTime(),
         allConfigData = [],
         serviceConfigData = [];
 
     Object.keys(service.get('configTypes')).forEach(function (type) {
       if (!serviceConfigTags.someProperty('type', type)) {
-        var obj = this.createKerberosSiteObj(type, tag);
+        var obj = this.createKerberosSiteObj(type);
         obj.service_config_version_note = Em.I18n.t('admin.kerberos.wizard.configuration.note');
         serviceConfigTags.pushObject(obj);
       }
@@ -233,7 +232,7 @@ App.KerberosWizardStep2Controller = App.WizardStep7Controller.extend(App.KDCCred
     });
   },
 
-  createKerberosSiteObj: function (site, tag) {
+  createKerberosSiteObj: function (site) {
     var properties = {};
     var content = this.get('stepConfigs')[0].get('configs');
     var configs = content.filterProperty('filename', site + '.xml');
@@ -253,7 +252,7 @@ App.KerberosWizardStep2Controller = App.WizardStep7Controller.extend(App.KDCCred
     this.tweakKdcTypeValue(properties);
     this.tweakManualKdcProperties(properties);
     this.tweakIpaKdcProperties(properties);
-    return {"type": site, "tag": tag, "properties": properties};
+    return {"type": site, "properties": properties};
   },
 
   tweakKdcTypeValue: function (properties) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/app/controllers/main/host/details.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/host/details.js b/ambari-web/app/controllers/main/host/details.js
index 299a0d8..e3b5471 100644
--- a/ambari-web/app/controllers/main/host/details.js
+++ b/ambari-web/app/controllers/main/host/details.js
@@ -1338,14 +1338,12 @@ App.MainHostDetailsController = Em.Controller.extend(App.SupportClientConfigsDow
     if (groups.length) {
       groups.forEach(function (group) {
         var desiredConfigs = [],
-          tag = 'version' + (new Date).getTime(),
           properties = group.properties;
 
         for (var site in properties) {
           if (!properties.hasOwnProperty(site) || Em.isNone(properties[site])) continue;
           desiredConfigs.push({
             "type": site,
-            "tag": tag,
             "properties": properties[site],
             "properties_attributes": group.properties_attributes[site],
             "service_config_version_note": Em.I18n.t('hosts.host.configs.save.note').format(App.format.role(componentName, false))

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/app/controllers/main/service/reassign/step4_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/service/reassign/step4_controller.js b/ambari-web/app/controllers/main/service/reassign/step4_controller.js
index c610b13..108b3e9 100644
--- a/ambari-web/app/controllers/main/service/reassign/step4_controller.js
+++ b/ambari-web/app/controllers/main/service/reassign/step4_controller.js
@@ -315,11 +315,9 @@ App.ReassignMasterWizardStep4Controller = App.HighAvailabilityProgressPageContro
    */
   getServiceConfigData: function (configs, attributes) {
     var componentName = this.get('content.reassign.component_name');
-    var tagName = 'version' + (new Date).getTime();
     var configData = Object.keys(configs).map(function (_siteName) {
       return {
         type: _siteName,
-        tag: tagName,
         properties: configs[_siteName],
         properties_attributes: attributes[_siteName] || {},
         service_config_version_note: Em.I18n.t('services.reassign.step4.save.configuration.note').format(App.format.role(componentName, false))

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/app/controllers/wizard/step8_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step8_controller.js b/ambari-web/app/controllers/wizard/step8_controller.js
index 39981f7..94139e0 100644
--- a/ambari-web/app/controllers/wizard/step8_controller.js
+++ b/ambari-web/app/controllers/wizard/step8_controller.js
@@ -1459,11 +1459,9 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
    * @method createConfigurations
    */
   createConfigurations: function () {
-    var tag = this.getServiceConfigVersion();
-
     if (this.get('isInstaller')) {
       /** add cluster-env **/
-      this.get('serviceConfigTags').pushObject(this.createDesiredConfig('cluster-env', tag, this.get('configs').filterProperty('filename', 'cluster-env.xml')));
+      this.get('serviceConfigTags').pushObject(this.createDesiredConfig('cluster-env', this.get('configs').filterProperty('filename', 'cluster-env.xml')));
     }
 
     this.get('selectedServices').forEach(function (service) {
@@ -1471,7 +1469,7 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
         if (!this.get('serviceConfigTags').someProperty('type', type)) {
           var configs = this.get('configs').filterProperty('filename', App.config.getOriginalFileName(type));
           var serviceConfigNote = this.getServiceConfigNote(type, service.get('displayName'));
-          this.get('serviceConfigTags').pushObject(this.createDesiredConfig(type, tag, configs, serviceConfigNote));
+          this.get('serviceConfigTags').pushObject(this.createDesiredConfig(type, configs, serviceConfigNote));
         }
       }, this);
     }, this);
@@ -1479,15 +1477,6 @@ App.WizardStep8Controller = Em.Controller.extend(App.AddSecurityConfigs, App.wiz
   },
 
   /**
-   * Get config version tag
-   *
-   * @returns {string}
-   */
-  getServiceConfigVersion: function() {
-    return 'version' + (this.get('isAddService') ? (new Date).getTime() : '1');
-  },
-
-  /**
    * Get config version message
    *
    * @param type

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/app/mixins/common/configs/configs_saver.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/common/configs/configs_saver.js b/ambari-web/app/mixins/common/configs/configs_saver.js
index 6b69ca1..33a90a3 100644
--- a/ambari-web/app/mixins/common/configs/configs_saver.js
+++ b/ambari-web/app/mixins/common/configs/configs_saver.js
@@ -405,12 +405,11 @@ App.ConfigsSaverMixin = Em.Mixin.create({
       serviceConfigNote = serviceConfigNote || "";
 
       fileNamesToSave.forEach(function(fName) {
-        var tagVersion = this.getUniqueTag();
 
         if (this.allowSaveSite(fName)) {
           var properties = configsToSave.filterProperty('filename', fName);
           var type = App.config.getConfigTagFromFileName(fName);
-          desired_config.push(this.createDesiredConfig(type, tagVersion, properties, serviceConfigNote, ignoreVersionNote));
+          desired_config.push(this.createDesiredConfig(type, properties, serviceConfigNote, ignoreVersionNote));
         }
       }, this);
     }
@@ -418,23 +417,6 @@ App.ConfigsSaverMixin = Em.Mixin.create({
   },
 
   /**
-   * generate unique tag
-   * @returns {string}
-   */
-  getUniqueTag: function() {
-    var timestamp = (new Date).getTime();
-    var tagVersion = "version" + timestamp;
-
-    while(this.get('_timeStamps')[tagVersion]) {
-      timestamp++;
-      tagVersion = "version" + timestamp;
-    }
-    /** @see <code>_timeStamps<code> **/
-    this.get('_timeStamps')[tagVersion] = true;
-    return tagVersion;
-  },
-
-  /**
    * For some file names we have a restriction
    * and can't save them, in this case method will return false
    *
@@ -467,17 +449,15 @@ App.ConfigsSaverMixin = Em.Mixin.create({
   /**
    * generating common JSON object for desired config
    * @param {string} type - file name without '.xml'
-   * @param {string} tagVersion - version + timestamp
    * @param {App.ConfigProperty[]} properties - array of properties from model
    * @param {string} [serviceConfigNote='']
    * @param {boolean} [ignoreVersionNote=false]
   * @returns {{type: string, properties: {}, properties_attributes: {}|undefined, service_config_version_note: string|undefined}}
    */
-  createDesiredConfig: function(type, tagVersion, properties, serviceConfigNote, ignoreVersionNote) {
-    Em.assert('type and tagVersion should be defined', type && tagVersion);
+  createDesiredConfig: function(type, properties, serviceConfigNote, ignoreVersionNote) {
+    Em.assert('type should be defined', type);
     var desired_config = {
       "type": type,
-      "tag": tagVersion,
       "properties": {}
     };
     if (!ignoreVersionNote) {

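With the client-side tag generation removed above, createDesiredConfig builds the desired_config payload from the type and properties alone, and the Ambari server assigns the version tag. A minimal sketch of the object it now returns (shape taken from the code above; the property values are illustrative only):

    // Illustrative only: shape of the object createDesiredConfig now returns.
    // The "tag" field is gone; the server generates the version tag instead.
    const desired_config = {
      type: 'core-site',                            // file name without '.xml'
      properties: { 'fs.defaultFS': 'hdfs://c1' },  // agent-required properties
      service_config_version_note: 'note'          // omitted when ignoreVersionNote is true
    };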
http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/app/mixins/main/service/configs/config_overridable.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mixins/main/service/configs/config_overridable.js b/ambari-web/app/mixins/main/service/configs/config_overridable.js
index e7d223d..0e8b7ee 100644
--- a/ambari-web/app/mixins/main/service/configs/config_overridable.js
+++ b/ambari-web/app/mixins/main/service/configs/config_overridable.js
@@ -246,7 +246,6 @@ App.ConfigOverridable = Em.Mixin.create({
           var type = Em.get(cst, 'site') || Em.get(cst, 'type');
           return {
             type: type,
-            tag: 'version' + (new Date).getTime(),
             properties: typeToPropertiesMap[type]
           };
         }),

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/app/utils/ajax/ajax.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/utils/ajax/ajax.js b/ambari-web/app/utils/ajax/ajax.js
index 941f5a1..c32d8d4 100644
--- a/ambari-web/app/utils/ajax/ajax.js
+++ b/ambari-web/app/utils/ajax/ajax.js
@@ -791,7 +791,6 @@ var urls = {
           Clusters: {
             desired_config: {
               "type": data.siteName,
-              "tag": 'version' + (new Date).getTime(),
               "properties": data.properties,
               "service_config_version_note": data.service_config_version_note
 
@@ -1456,7 +1455,6 @@ var urls = {
           Clusters: {
             desired_config: {
               "type": data.siteName,
-              "tag": 'version' + (new Date).getTime(),
               "properties": data.properties
             }
           }

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/test/controllers/main/admin/highAvailability/journalNode/progress_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/highAvailability/journalNode/progress_controller_test.js b/ambari-web/test/controllers/main/admin/highAvailability/journalNode/progress_controller_test.js
index 868f832..327a881 100644
--- a/ambari-web/test/controllers/main/admin/highAvailability/journalNode/progress_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/highAvailability/journalNode/progress_controller_test.js
@@ -52,13 +52,11 @@ describe('App.ManageJournalNodeProgressPageController', function () {
           "properties": {},
           "properties_attributes": {},
           "service_config_version_note": "note",
-          "tag": "version1",
           "type": "site1"
         },
         {
           "properties": undefined,
           "service_config_version_note": "note",
-          "tag": "version1",
           "type": "site2"
         }
       ]);

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/test/controllers/main/admin/highAvailability/progress_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/highAvailability/progress_controller_test.js b/ambari-web/test/controllers/main/admin/highAvailability/progress_controller_test.js
index 0f1b18d..ccadd6b 100644
--- a/ambari-web/test/controllers/main/admin/highAvailability/progress_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/highAvailability/progress_controller_test.js
@@ -64,7 +64,6 @@ describe('App.HighAvailabilityProgressPageController', function () {
         result: [
           {
             type: "site1",
-            tag: "version1",
             properties: {
               site1_property1: "site1_property1_value",
               site1_property2: "site1_property2_value"
@@ -78,7 +77,6 @@ describe('App.HighAvailabilityProgressPageController', function () {
           },
           {
             type: "site2",
-            tag: "version1",
             properties: {
               site2_property1: "site2_property1_value",
               site2_property2: "site2_property2_value"
@@ -109,7 +107,6 @@ describe('App.HighAvailabilityProgressPageController', function () {
         result: [
           {
             type: "site1",
-            tag: "version1",
             properties: {
               site1_property1: "site1_property1_value",
               site1_property2: "site1_property2_value"

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/test/controllers/main/admin/kerberos/step2_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/kerberos/step2_controller_test.js b/ambari-web/test/controllers/main/admin/kerberos/step2_controller_test.js
index 78e43f0..794fe4a 100644
--- a/ambari-web/test/controllers/main/admin/kerberos/step2_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/kerberos/step2_controller_test.js
@@ -632,9 +632,8 @@ describe('App.KerberosWizardStep2Controller', function() {
           filename: 'site.xml'
         }]
       })]);
-      expect(controller.createKerberosSiteObj('site', 'tag')).to.be.eql({
+      expect(controller.createKerberosSiteObj('site')).to.be.eql({
         "type": 'site',
-        "tag": 'tag',
         "properties": {}
       });
     });
@@ -647,9 +646,8 @@ describe('App.KerberosWizardStep2Controller', function() {
           filename: 'site.xml'
         }]
       })]);
-      expect(controller.createKerberosSiteObj('site', 'tag')).to.be.eql({
+      expect(controller.createKerberosSiteObj('site')).to.be.eql({
         "type": 'site',
-        "tag": 'tag',
         "properties": {
           'kdc_hosts': {
             displayType: 'host',
@@ -667,9 +665,8 @@ describe('App.KerberosWizardStep2Controller', function() {
           filename: 'site.xml'
         }]
       })]);
-      expect(controller.createKerberosSiteObj('site', 'tag')).to.be.eql({
+      expect(controller.createKerberosSiteObj('site')).to.be.eql({
         "type": 'site',
-        "tag": 'tag',
         "properties": {
           'n1': {
             name: 'n1',

http://git-wip-us.apache.org/repos/asf/ambari/blob/add33d4f/ambari-web/test/mixins/common/configs/configs_saver_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/mixins/common/configs/configs_saver_test.js b/ambari-web/test/mixins/common/configs/configs_saver_test.js
index 855823c..f20a49f 100644
--- a/ambari-web/test/mixins/common/configs/configs_saver_test.js
+++ b/ambari-web/test/mixins/common/configs/configs_saver_test.js
@@ -136,18 +136,16 @@ describe('App.ConfigsSaverMixin', function() {
     });
 
     it('generates config without properties', function() {
-      expect(mixin.createDesiredConfig('type1', 'version1')).to.eql({
+      expect(mixin.createDesiredConfig('type1')).to.eql({
         "type": 'type1',
-        "tag": 'version1',
         "properties": {},
         "service_config_version_note": ""
       })
     });
 
     it('generates config with properties', function() {
-      expect(mixin.createDesiredConfig('type1', 'version1', [Em.Object.create({name: 'p1', value: 'v1', isRequiredByAgent: true}), Em.Object.create({name: 'p2', value: 'v2', isRequiredByAgent: true})], "note")).to.eql({
+      expect(mixin.createDesiredConfig('type1', [Em.Object.create({name: 'p1', value: 'v1', isRequiredByAgent: true}), Em.Object.create({name: 'p2', value: 'v2', isRequiredByAgent: true})], "note")).to.eql({
         "type": 'type1',
-        "tag": 'version1',
         "properties": {
           "p1": 'v1',
           "p2": 'v2'
@@ -157,9 +155,8 @@ describe('App.ConfigsSaverMixin', function() {
     });
 
     it('generates config with properties and skip isRequiredByAgent', function() {
-      expect(mixin.createDesiredConfig('type1', 'version1', [Em.Object.create({name: 'p1', value: 'v1', isRequiredByAgent: true}), Em.Object.create({name: 'p2', value: 'v2', isRequiredByAgent: false})], "note")).to.eql({
+      expect(mixin.createDesiredConfig('type1', [Em.Object.create({name: 'p1', value: 'v1', isRequiredByAgent: true}), Em.Object.create({name: 'p2', value: 'v2', isRequiredByAgent: false})], "note")).to.eql({
         "type": 'type1',
-        "tag": 'version1',
         "properties": {
           p1: 'v1'
         },
@@ -168,9 +165,8 @@ describe('App.ConfigsSaverMixin', function() {
     });
 
     it('generates config with properties and skip service_config_version_note', function() {
-      expect(mixin.createDesiredConfig('type1', 'version1', [Em.Object.create({name: 'p1', value: 'v1', isRequiredByAgent: true})], "note", true)).to.eql({
+      expect(mixin.createDesiredConfig('type1', [Em.Object.create({name: 'p1', value: 'v1', isRequiredByAgent: true})], "note", true)).to.eql({
         "type": 'type1',
-        "tag": 'version1',
         "properties": {
           p1: 'v1'
         }
@@ -178,7 +174,7 @@ describe('App.ConfigsSaverMixin', function() {
     });
 
     it('generates config with final, password, user, group, text, additional_user_property, not_managed_hdfs_path, value_from_property_file', function() {
-      expect(mixin.createDesiredConfig('type1', 'version1', [
+      expect(mixin.createDesiredConfig('type1', [
           Em.Object.create({name: 'p1', value: 'v1', isFinal: true, isRequiredByAgent: true}),
           Em.Object.create({name: 'p2', value: 'v2', isRequiredByAgent: true}),
           Em.Object.create({name: 'p3', value: 'v3', isRequiredByAgent: true, propertyType: ["PASSWORD", "USER", "GROUP"]}),
@@ -188,7 +184,6 @@ describe('App.ConfigsSaverMixin', function() {
           Em.Object.create({name: 'p7', value: 'v7', isRequiredByAgent: true, propertyType: ["PASSWORD"]})
         ], "note")).to.eql({
         "type": 'type1',
-        "tag": 'version1',
         "properties": {
           p1: 'v1',
           p2: 'v2',
@@ -259,19 +254,6 @@ describe('App.ConfigsSaverMixin', function() {
     })
   });
 
-  describe('#getUniqueTag', function() {
-
-    it('should generate unique tags', function() {
-      var tags = [];
-      for (var i = 0; i < 3; i++) {
-        tags.push(mixin.getUniqueTag());
-      }
-      expect(tags[1]).to.not.be.equal(tags[0]);
-      expect(tags[2]).to.not.be.equal(tags[1]);
-      expect(tags[0]).to.not.be.equal(tags[2]);
-    });
-  });
-
   describe('#getModifiedConfigs', function () {
     var configs = [
       Em.Object.create({


[48/50] [abbrv] ambari git commit: AMBARI-22107. Enable new Log Aggregation format for Yarn default configuration in Ambari.(vbrodetskyi)

Posted by rl...@apache.org.
AMBARI-22107. Enable new Log Aggregation format for Yarn default configuration in Ambari.(vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/19fe4cf1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/19fe4cf1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/19fe4cf1

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 19fe4cf1e7450c7230031c660295ec6e04958519
Parents: f92da65
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Oct 2 13:38:41 2017 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Oct 2 13:38:41 2017 +0300

----------------------------------------------------------------------
 .../2.6/services/YARN/configuration/yarn-site.xml | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/19fe4cf1/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
index 9094b15..b5eedea 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/YARN/configuration/yarn-site.xml
@@ -117,4 +117,22 @@
     </depends-on>
     <on-ambari-upgrade add="false"/>
   </property>
+  <property>
+    <name>yarn.log-aggregation.file-formats</name>
+    <value>IndexedFormat,TFile</value>
+    <description>Comma-separated list of log aggregation file formats, in priority order; the first format listed is used for writing new aggregated logs.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>yarn.log-aggregation.file-controller.TFile.class</name>
+    <value>org.apache.hadoop.yarn.logaggregation.filecontroller.tfile.LogAggregationTFileController</value>
+    <description>Class that supports reading and writing aggregated logs in the TFile format.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
+  <property>
+    <name>yarn.log-aggregation.file-controller.IndexedFormat.class</name>
+    <value>org.apache.hadoop.yarn.logaggregation.filecontroller.ifile.LogAggregationIndexedFileController</value>
+    <description>Class that supports reading and writing aggregated logs in the IndexedFormat (IFile) format.</description>
+    <on-ambari-upgrade add="true"/>
+  </property>
 </configuration>

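These three properties follow YARN's per-format lookup pattern: yarn.log-aggregation.file-formats lists the supported formats in priority order, and each format's controller class is read from a key derived from the format name. A sketch of that key pattern (inferred from the property names above; the actual resolution happens in Hadoop's LogAggregationFileControllerFactory, not in Ambari):

    // Sketch of the per-format key pattern, for illustration only.
    const formats: string[] = 'IndexedFormat,TFile'.split(',');
    const controllerKeyFor = (format: string): string =>
      `yarn.log-aggregation.file-controller.${format}.class`;
    // controllerKeyFor('IndexedFormat') yields the third property name above.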

[29/50] [abbrv] ambari git commit: AMBARI-22093 Log Search UI: implement service logs actions functionality. (ababiichuk)

Posted by rl...@apache.org.
AMBARI-22093 Log Search UI: implement service logs actions functionality. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8852edd2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8852edd2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8852edd2

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 8852edd2f5458a22109bc0931fd6e91341d0523f
Parents: 7950e3c
Author: ababiichuk <ab...@hortonworks.com>
Authored: Fri Sep 29 15:45:40 2017 +0300
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Fri Sep 29 15:45:40 2017 +0300

----------------------------------------------------------------------
 .../ambari-logsearch-web/src/app/app.module.ts  |   6 ++
 .../classes/active-service-log-entry.class.ts   |  23 ++++
 .../src/app/classes/list-item.class.ts          |   1 +
 .../queries/audit-logs-query-params.class.ts    |   2 +-
 ...service-logs-truncated-query-params.class.ts |  36 +++++++
 .../classes/service-log-context-entry.class.ts  |  26 +++++
 .../dropdown-button.component.html              |   3 +-
 .../dropdown-button.component.spec.ts           |   5 +-
 .../dropdown-list/dropdown-list.component.html  |   2 +-
 .../dropdown-list.component.spec.ts             |  12 ++-
 .../dropdown-list/dropdown-list.component.ts    |   9 +-
 .../filter-button.component.spec.ts             |   5 +-
 .../filter-dropdown.component.spec.ts           |   5 +-
 .../filters-panel.component.spec.ts             |   5 +-
 .../log-context/log-context.component.html      |  33 ++++++
 .../log-context/log-context.component.less      |  23 ++++
 .../log-context/log-context.component.spec.ts   | 108 +++++++++++++++++++
 .../log-context/log-context.component.ts        |  91 ++++++++++++++++
 .../log-file-entry.component.html               |  20 ++++
 .../log-file-entry.component.less               |  31 ++++++
 .../log-file-entry.component.spec.ts            |  56 ++++++++++
 .../log-file-entry/log-file-entry.component.ts  |  51 +++++++++
 .../logs-container.component.html               |   8 +-
 .../logs-container.component.spec.ts            |   5 +-
 .../logs-container/logs-container.component.ts  |  29 +++--
 .../logs-list/logs-list.component.html          |  30 +++---
 .../logs-list/logs-list.component.less          |  36 +------
 .../logs-list/logs-list.component.spec.ts       |   3 +
 .../components/logs-list/logs-list.component.ts |  34 +++++-
 .../main-container.component.html               |   7 ++
 .../main-container.component.less               |   4 +
 .../main-container.component.spec.ts            |  13 ++-
 .../main-container/main-container.component.ts  |  28 ++++-
 .../menu-button/menu-button.component.spec.ts   |   5 +-
 .../timezone-picker.component.spec.ts           |   5 +-
 .../src/app/components/variables.less           |  30 ++++++
 .../src/app/models/app-state.model.ts           |  10 +-
 .../src/app/models/bar-graph.model.ts           |   2 +-
 .../src/app/models/graph.model.ts               |   2 +-
 .../src/app/models/log.model.ts                 |   1 +
 .../src/app/models/store.model.ts               |  11 ++
 .../services/component-actions.service.spec.ts  |   8 +-
 .../app/services/component-actions.service.ts   |  50 ++++++++-
 .../component-generator.service.spec.ts         |  10 +-
 .../src/app/services/http-client.service.ts     |   7 +-
 .../app/services/logs-container.service.spec.ts |   8 +-
 .../src/app/services/logs-container.service.ts  |  70 ++++++++++--
 .../app/services/storage/reducers.service.ts    |   2 +
 .../storage/service-logs-truncated.service.ts   |  32 ++++++
 .../src/assets/i18n/en.json                     |   7 +-
 50 files changed, 908 insertions(+), 102 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts b/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts
index c4dc698..ff791fe 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/app.module.ts
@@ -44,6 +44,7 @@ import {AppStateService} from '@app/services/storage/app-state.service';
 import {AuditLogsService} from '@app/services/storage/audit-logs.service';
 import {ServiceLogsService} from '@app/services/storage/service-logs.service';
 import {ServiceLogsHistogramDataService} from '@app/services/storage/service-logs-histogram-data.service';
+import {ServiceLogsTruncatedService} from '@app/services/storage/service-logs-truncated.service';
 import {GraphsService} from '@app/services/storage/graphs.service';
 import {HostsService} from '@app/services/storage/hosts.service';
 import {UserConfigsService} from '@app/services/storage/user-configs.service';
@@ -76,6 +77,8 @@ import {NodeBarComponent} from '@app/components/node-bar/node-bar.component';
 import {SearchBoxComponent} from '@app/components/search-box/search-box.component';
 import {TimeRangePickerComponent} from '@app/components/time-range-picker/time-range-picker.component';
 import {DatePickerComponent} from '@app/components/date-picker/date-picker.component';
+import {LogContextComponent} from '@app/components/log-context/log-context.component';
+import {LogFileEntryComponent} from '@app/components/log-file-entry/log-file-entry.component';
 
 import {TimeZoneAbbrPipe} from '@app/pipes/timezone-abbr.pipe';
 import {TimerSecondsPipe} from '@app/pipes/timer-seconds.pipe';
@@ -124,6 +127,8 @@ export function getXHRBackend(injector: Injector, browser: BrowserXhr, xsrf: XSR
     SearchBoxComponent,
     TimeRangePickerComponent,
     DatePickerComponent,
+    LogContextComponent,
+    LogFileEntryComponent,
     TimeZoneAbbrPipe,
     TimerSecondsPipe
   ],
@@ -157,6 +162,7 @@ export function getXHRBackend(injector: Injector, browser: BrowserXhr, xsrf: XSR
     AuditLogsService,
     ServiceLogsService,
     ServiceLogsHistogramDataService,
+    ServiceLogsTruncatedService,
     GraphsService,
     HostsService,
     UserConfigsService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/classes/active-service-log-entry.class.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/active-service-log-entry.class.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/active-service-log-entry.class.ts
new file mode 100644
index 0000000..d3d7d95
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/active-service-log-entry.class.ts
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export interface ActiveServiceLogEntry {
+  id: string;
+  host_name: string;
+  component_name: string;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/classes/list-item.class.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/list-item.class.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/list-item.class.ts
index adb023b..1aaaecc 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/list-item.class.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/list-item.class.ts
@@ -22,4 +22,5 @@ export interface ListItem {
   value: any;
   iconClass?: string;
   isChecked?: boolean;
+  action?: string;
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.class.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.class.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.class.ts
index e36bf18..3727abb 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.class.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/audit-logs-query-params.class.ts
@@ -35,7 +35,7 @@ export class AuditLogsQueryParams extends QueryParams {
   pageSize: string;
   startIndex: string;
   sortBy?: string;
-  sortType?: string;
+  sortType?: 'asc' | 'desc';
   clusters?: string;
   mustBe?: string;
   mustNot?: string;

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.class.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.class.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.class.ts
new file mode 100644
index 0000000..da05cee
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/queries/service-logs-truncated-query-params.class.ts
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {QueryParams} from '@app/classes/queries/query-params.class';
+
+export const defaultParams = {
+  numberRows: '10',
+  scrollType: ''
+};
+
+export class ServiceLogsTruncatedQueryParams extends QueryParams {
+  constructor(options: ServiceLogsTruncatedQueryParams) {
+    const finalParams = Object.assign({}, defaultParams, options);
+    super(finalParams);
+  }
+  id: string;
+  host_name: string;
+  component_name: string;
+  numberRows: string;
+  scrollType: 'before' | 'after' | '';
+}

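A hypothetical construction of these params, showing the defaults being merged by the constructor above (the host and component names are made up; numberRows and scrollType are passed explicitly because the class declares them as required fields):

    // Hypothetical usage: fetch up to 10 rows preceding log entry '42'.
    const params = new ServiceLogsTruncatedQueryParams({
      id: '42',
      host_name: 'c6401.ambari.apache.org',   // assumed host name
      component_name: 'hdfs_namenode',        // assumed component name
      numberRows: '10',
      scrollType: 'before'
    });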
http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/classes/service-log-context-entry.class.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/classes/service-log-context-entry.class.ts b/ambari-logsearch/ambari-logsearch-web/src/app/classes/service-log-context-entry.class.ts
new file mode 100644
index 0000000..15c05fb
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/classes/service-log-context-entry.class.ts
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+export interface ServiceLogContextEntry {
+  id: string;
+  time: number;
+  level: string;
+  message: string;
+  fileName: string | null;
+  lineNumber: number | null;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.html
index 9536573..798a609 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.html
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.html
@@ -26,5 +26,6 @@
     <span *ngIf="!hideCaret" class="caret"></span>
   </button>
   <ul data-component="dropdown-list" [ngClass]="{'dropdown-menu': true, 'dropdown-menu-right': isRightAlign}"
-      [items]="options" [isMultipleChoice]="isMultipleChoice" (selectedItemChange)="updateValue($event)"></ul>
+      [items]="options" [isMultipleChoice]="isMultipleChoice" (selectedItemChange)="updateValue($event)"
+      [actionArguments]="additionalArgs"></ul>
 </div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.spec.ts
index f7227b1..e795986 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-button/dropdown-button.component.spec.ts
@@ -30,6 +30,7 @@ import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/aud
 import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service';
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {FilteringService} from '@app/services/filtering.service';
 import {UtilsService} from '@app/services/utils.service';
 import {ComponentActionsService} from '@app/services/component-actions.service';
@@ -56,7 +57,8 @@ describe('DropdownButtonComponent', () => {
           auditLogsFields,
           serviceLogs,
           serviceLogsFields,
-          serviceLogsHistogramData
+          serviceLogsHistogramData,
+          serviceLogsTruncated
         }),
         ...TranslationModules
       ],
@@ -71,6 +73,7 @@ describe('DropdownButtonComponent', () => {
         ServiceLogsService,
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
+        ServiceLogsTruncatedService,
         FilteringService,
         UtilsService,
         ComponentActionsService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.html
index 316d3f9..5de78ad 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.html
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.html
@@ -26,7 +26,7 @@
     </label>
   </label>
   <span class="list-item-label label-container" *ngIf="!isMultipleChoice"
-        (click)="changeSelectedItem({value: item.value, label: item.label})">
+        (click)="changeSelectedItem(item)">
     <span *ngIf="item.iconClass" [ngClass]="item.iconClass"></span>
     {{item.label | translate}}
     <div #additionalComponent></div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.spec.ts
index eacac04..759a0e1 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.spec.ts
@@ -26,12 +26,15 @@ import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/aud
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
 import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
+import {AppStateService, appState} from '@app/services/storage/app-state.service';
 import {ClustersService, clusters} from '@app/services/storage/clusters.service';
 import {ComponentsService, components} from '@app/services/storage/components.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {ComponentGeneratorService} from '@app/services/component-generator.service';
 import {LogsContainerService} from '@app/services/logs-container.service';
 import {HttpClientService} from '@app/services/http-client.service';
 import {FilteringService} from '@app/services/filtering.service';
+import {ComponentActionsService} from '@app/services/component-actions.service';
 
 import {DropdownListComponent} from './dropdown-list.component';
 
@@ -60,8 +63,10 @@ describe('DropdownListComponent', () => {
           serviceLogsFields,
           serviceLogsHistogramData,
           appSettings,
+          appState,
           clusters,
-          components
+          components,
+          serviceLogsTruncated
         })
       ],
       providers: [
@@ -72,6 +77,7 @@ describe('DropdownListComponent', () => {
           useValue: httpClient
         },
         FilteringService,
+        ComponentActionsService,
         HostsService,
         AuditLogsService,
         ServiceLogsService,
@@ -79,8 +85,10 @@ describe('DropdownListComponent', () => {
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
         AppSettingsService,
+        AppStateService,
         ClustersService,
-        ComponentsService
+        ComponentsService,
+        ServiceLogsTruncatedService
       ]
     })
     .compileComponents();

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.ts
index 82656cf..656c901 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/dropdown-list/dropdown-list.component.ts
@@ -19,6 +19,7 @@
 import {Component, AfterViewInit, Input, Output, EventEmitter, ViewChildren, ViewContainerRef, QueryList} from '@angular/core';
 import {ListItem} from '@app/classes/list-item.class';
 import {ComponentGeneratorService} from '@app/services/component-generator.service';
+import {ComponentActionsService} from '@app/services/component-actions.service';
 
 @Component({
   selector: 'ul[data-component="dropdown-list"]',
@@ -27,7 +28,7 @@ import {ComponentGeneratorService} from '@app/services/component-generator.servi
 })
 export class DropdownListComponent implements AfterViewInit {
 
-  constructor(private componentGenerator: ComponentGeneratorService) {
+  constructor(private componentGenerator: ComponentGeneratorService, private actions: ComponentActionsService) {
   }
 
   ngAfterViewInit() {
@@ -49,6 +50,9 @@ export class DropdownListComponent implements AfterViewInit {
   @Input()
   additionalLabelComponentSetter?: string;
 
+  @Input()
+  actionArguments: any[] = [];
+
   @Output()
   selectedItemChange: EventEmitter<ListItem> = new EventEmitter();
 
@@ -58,6 +62,9 @@ export class DropdownListComponent implements AfterViewInit {
   containers: QueryList<ViewContainerRef>;
 
   changeSelectedItem(options: ListItem): void {
+    if (options.action) {
+      this.actions[options.action](...this.actionArguments);
+    }
     this.selectedItemChange.emit(options);
   }
 

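With the action field added to ListItem and the dispatch above, a dropdown option can now invoke a named ComponentActionsService method (with the bound actionArguments) before the selection event is emitted. A hedged sketch of such an item; "openServiceLog" is an assumed action name, not one confirmed by this diff:

    // Hypothetical list item: selecting it calls
    // ComponentActionsService.openServiceLog(...actionArguments), then emits
    // selectedItemChange as before.
    const item: ListItem = {
      label: 'logs.copy',        // translation key, illustrative
      value: null,
      action: 'openServiceLog'   // assumed action method name
    };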
http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts
index a01a3f3..4e6f460 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-button/filter-button.component.spec.ts
@@ -30,6 +30,7 @@ import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/aud
 import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service';
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {ComponentActionsService} from '@app/services/component-actions.service';
 import {FilteringService} from '@app/services/filtering.service';
 import {UtilsService} from '@app/services/utils.service';
@@ -56,7 +57,8 @@ describe('FilterButtonComponent', () => {
           auditLogsFields,
           serviceLogs,
           serviceLogsFields,
-          serviceLogsHistogramData
+          serviceLogsHistogramData,
+          serviceLogsTruncated
         }),
         ...TranslationModules
       ],
@@ -71,6 +73,7 @@ describe('FilterButtonComponent', () => {
         ServiceLogsService,
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
+        ServiceLogsTruncatedService,
         ComponentActionsService,
         FilteringService,
         UtilsService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-dropdown/filter-dropdown.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-dropdown/filter-dropdown.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-dropdown/filter-dropdown.component.spec.ts
index 85e7ecb..f5b9330 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-dropdown/filter-dropdown.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filter-dropdown/filter-dropdown.component.spec.ts
@@ -26,6 +26,7 @@ import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/aud
 import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service';
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {FilteringService} from '@app/services/filtering.service';
 import {UtilsService} from '@app/services/utils.service';
 import {ComponentActionsService} from '@app/services/component-actions.service';
@@ -65,7 +66,8 @@ describe('FilterDropdownComponent', () => {
           auditLogsFields,
           serviceLogs,
           serviceLogsFields,
-          serviceLogsHistogramData
+          serviceLogsHistogramData,
+          serviceLogsTruncated
         }),
         ...TranslationModules
       ],
@@ -77,6 +79,7 @@ describe('FilterDropdownComponent', () => {
         ServiceLogsService,
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
+        ServiceLogsTruncatedService,
         {
           provide: FilteringService,
           useValue: filtering

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
index ae5a4af..0643ea6 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/filters-panel/filters-panel.component.spec.ts
@@ -30,6 +30,7 @@ import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/aud
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
 import {AppStateService, appState} from '@app/services/storage/app-state.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {FilteringService} from '@app/services/filtering.service';
 import {HttpClientService} from '@app/services/http-client.service';
 import {UtilsService} from '@app/services/utils.service';
@@ -67,7 +68,8 @@ describe('FiltersPanelComponent', () => {
           auditLogsFields,
           serviceLogsFields,
           serviceLogsHistogramData,
-          appState
+          appState,
+          serviceLogsTruncated
         }),
         ...TranslationModules
       ],
@@ -82,6 +84,7 @@ describe('FiltersPanelComponent', () => {
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
         AppStateService,
+        ServiceLogsTruncatedService,
         FilteringService,
         LogsContainerService,
         {

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.html
new file mode 100644
index 0000000..2e51e0b
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.html
@@ -0,0 +1,33 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<modal title="{{hostName}} -> {{componentName}}" submitButtonLabel="modal.close" [showCancelButton]="false"
+       [isLargeModal]="true" (init)="scrollToCurrentEntry()" (submit)="closeLogContext()" (close)="closeLogContext()">
+  <ng-template>
+    <button class="btn btn-primary" (click)="loadBefore()">
+      {{'logs.loadMore' | translate}} <span class="fa fa-arrow-up"></span>
+    </button>
+    <div class="logs">
+      <log-file-entry *ngFor="let log of logs | async" [ngClass]="log.id === id ? currentLogClassName : ''"
+                   [time]="log.time" [level]="log.level" [fileName]="log.fileName" [lineNumber]="log.lineNumber"
+                   [message]="log.message"></log-file-entry>
+    </div>
+    <button class="btn btn-primary" (click)="loadAfter()">
+      {{'logs.loadMore' | translate}} <span class="fa fa-arrow-down"></span>
+    </button>
+  </ng-template>
+</modal>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.less
new file mode 100644
index 0000000..235853b
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.less
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+@import '../variables';
+
+.logs {
+  max-height: @dropdown-max-height; // TODO implement actual styles
+  overflow-y: auto;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.spec.ts
new file mode 100644
index 0000000..c21750a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.spec.ts
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {CUSTOM_ELEMENTS_SCHEMA} from '@angular/core';
+import {async, ComponentFixture, TestBed} from '@angular/core/testing';
+import {StoreModule} from '@ngrx/store';
+import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service';
+import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service';
+import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service';
+import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
+import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
+import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
+import {AppStateService, appState} from '@app/services/storage/app-state.service';
+import {ClustersService, clusters} from '@app/services/storage/clusters.service';
+import {ComponentsService, components} from '@app/services/storage/components.service';
+import {HostsService, hosts} from '@app/services/storage/hosts.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
+import {TranslationModules} from '@app/test-config.spec';
+import {ModalComponent} from '@app/components/modal/modal.component';
+import {LogsContainerService} from '@app/services/logs-container.service';
+import {HttpClientService} from '@app/services/http-client.service';
+import {FilteringService} from '@app/services/filtering.service';
+
+import {LogContextComponent} from './log-context.component';
+
+describe('LogContextComponent', () => {
+  let component: LogContextComponent;
+  let fixture: ComponentFixture<LogContextComponent>;
+
+  beforeEach(async(() => {
+    const httpClient = {
+      get: () => {
+        return {
+          subscribe: () => {
+          }
+        }
+      }
+    };
+    TestBed.configureTestingModule({
+      declarations: [
+        LogContextComponent,
+        ModalComponent
+      ],
+      imports: [
+        StoreModule.provideStore({
+          auditLogs,
+          serviceLogs,
+          auditLogsFields,
+          serviceLogsFields,
+          serviceLogsHistogramData,
+          appSettings,
+          appState,
+          clusters,
+          components,
+          hosts,
+          serviceLogsTruncated
+        }),
+        ...TranslationModules
+      ],
+      providers: [
+        AuditLogsService,
+        ServiceLogsService,
+        AuditLogsFieldsService,
+        ServiceLogsFieldsService,
+        ServiceLogsHistogramDataService,
+        AppSettingsService,
+        AppStateService,
+        ClustersService,
+        ComponentsService,
+        HostsService,
+        ServiceLogsTruncatedService,
+        LogsContainerService,
+        {
+          provide: HttpClientService,
+          useValue: httpClient
+        },
+        FilteringService
+      ],
+      schemas: [CUSTOM_ELEMENTS_SCHEMA]
+    })
+    .compileComponents();
+  }));
+
+  beforeEach(() => {
+    fixture = TestBed.createComponent(LogContextComponent);
+    component = fixture.componentInstance;
+    component.scrollToCurrentEntry = () => {};
+    fixture.detectChanges();
+  });
+
+  it('should create component', () => {
+    expect(component).toBeTruthy();
+  });
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.ts
new file mode 100644
index 0000000..467de98
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-context/log-context.component.ts
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {Component, Input, ElementRef} from '@angular/core';
+import {Observable} from 'rxjs/Observable';
+import 'rxjs/add/operator/map';
+import {LogsContainerService} from '@app/services/logs-container.service';
+import {ServiceLogsTruncatedService} from '@app/services/storage/service-logs-truncated.service';
+import {AppStateService} from '@app/services/storage/app-state.service';
+import {ServiceLog} from '@app/models/service-log.model';
+import {ServiceLogContextEntry} from '@app/classes/service-log-context-entry.class';
+
+@Component({
+  selector: 'log-context',
+  templateUrl: './log-context.component.html',
+  styleUrls: ['./log-context.component.less']
+})
+export class LogContextComponent {
+
+  constructor(private element: ElementRef, private logsContainer: LogsContainerService, private serviceLogsTruncatedStorage: ServiceLogsTruncatedService, private appState: AppStateService) {
+  }
+
+  @Input()
+  id: string;
+
+  @Input()
+  hostName: string;
+
+  @Input()
+  componentName: string;
+
+  readonly currentLogClassName: string = 'alert-warning'; // TODO implement custom class name with actual styles
+
+  firstEntryId: string;
+
+  lastEntryId: string;
+
+  logs: Observable<ServiceLogContextEntry[]> = this.serviceLogsTruncatedStorage.getAll().map((logs: ServiceLog[]): ServiceLogContextEntry[] => {
+    if (logs.length) {
+      this.firstEntryId = logs[0].id;
+      this.lastEntryId = logs[logs.length - 1].id;
+    }
+    return logs.map((log: ServiceLog): ServiceLogContextEntry => {
+      return {
+        id: log.id,
+        time: log.logtime,
+        level: log.level,
+        message: log.log_message,
+        fileName: log.file,
+        lineNumber: log.line_number
+      };
+    });
+  });
+
+  closeLogContext(): void {
+    this.appState.setParameters({
+      isServiceLogContextView: false,
+      activeLog: null
+    });
+    this.serviceLogsTruncatedStorage.clear();
+    this.firstEntryId = '';
+    this.lastEntryId = '';
+  }
+
+  scrollToCurrentEntry() {
+    this.element.nativeElement.getElementsByClassName(this.currentLogClassName).item(0).scrollIntoView();
+  }
+
+  loadBefore(): void {
+    this.logsContainer.loadLogContext(this.firstEntryId, this.hostName, this.componentName, 'before');
+  }
+
+  loadAfter(): void {
+    this.logsContainer.loadLogContext(this.lastEntryId, this.hostName, this.componentName, 'after');
+  }
+
+}

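closeLogContext() above resets the app state and clears the truncated-logs store; the inverse operation that opens the context view is not shown in full in this diff. A hedged sketch of what it would look like, reusing the same AppStateService.setParameters API and the ActiveServiceLogEntry shape seen above:

    // Hypothetical inverse of closeLogContext(), assuming the same state keys.
    this.appState.setParameters({
      isServiceLogContextView: true,
      activeLog: {id: logId, host_name: hostName, component_name: componentName}
    });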
http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.html
new file mode 100644
index 0000000..7d4c296
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.html
@@ -0,0 +1,20 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<div class="log">{{time | amTz: timeZone |amDateFormat: timeFormat}} <span
+  class="{{'log-level ' + level.toLowerCase()}}">{{level}}</span><span *ngIf="fileName"> {{fileName}}<span
+  *ngIf="lineNumber">:{{lineNumber}}</span></span> - {{message}}</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.less
new file mode 100644
index 0000000..d3523d3
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.less
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+@import '../variables';
+
+:host {
+  display: block;
+
+  .log {
+    font-family: monospace;
+    white-space: pre-wrap;
+
+    .log-level {
+      .log-colors;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.spec.ts
new file mode 100644
index 0000000..0ae7e67
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.spec.ts
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {async, ComponentFixture, TestBed} from '@angular/core/testing';
+import {MomentModule} from 'angular2-moment';
+import {MomentTimezoneModule} from 'angular-moment-timezone';
+import {StoreModule} from '@ngrx/store';
+import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
+
+import {LogFileEntryComponent} from './log-file-entry.component';
+
+describe('LogFileEntryComponent', () => {
+  let component: LogFileEntryComponent;
+  let fixture: ComponentFixture<LogFileEntryComponent>;
+
+  beforeEach(async(() => {
+    TestBed.configureTestingModule({
+      declarations: [LogFileEntryComponent],
+      imports: [
+        StoreModule.provideStore({
+          appSettings
+        }),
+        MomentModule,
+        MomentTimezoneModule
+      ],
+      providers: [
+        AppSettingsService
+      ]
+    })
+    .compileComponents();
+  }));
+
+  beforeEach(() => {
+    fixture = TestBed.createComponent(LogFileEntryComponent);
+    component = fixture.componentInstance;
+    fixture.detectChanges();
+  });
+
+  it('should create component', () => {
+    expect(component).toBeTruthy();
+  });
+});

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.ts
new file mode 100644
index 0000000..c0a7393
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/log-file-entry/log-file-entry.component.ts
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {Component, Input} from '@angular/core';
+import {AppSettingsService} from '@app/services/storage/app-settings.service';
+
+@Component({
+  selector: 'log-file-entry',
+  templateUrl: './log-file-entry.component.html',
+  styleUrls: ['./log-file-entry.component.less']
+})
+export class LogFileEntryComponent {
+
+  constructor(private appSettings: AppSettingsService) {
+    appSettings.getParameter('timeZone').subscribe((value: string) => this.timeZone = value);
+  }
+
+  @Input()
+  time: string = '';
+
+  @Input()
+  level: string = '';
+
+  @Input()
+  fileName?: string;
+
+  @Input()
+  lineNumber?: string;
+
+  @Input()
+  message: string = '';
+
+  readonly timeFormat: string = 'YYYY-MM-DD HH:mm:ss,SSS';
+
+  timeZone: string;
+
+}
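
For reference, a hypothetical host component (illustrative only, with made-up sample values) showing the inputs LogFileEntryComponent expects; the bound field names mirror the ServiceLog model used by the logs-list template later in this commit:

    import {Component} from '@angular/core';

    @Component({
      selector: 'log-file-entry-demo',
      template: `
        <log-file-entry [time]="log.logtime" [level]="log.level"
                        [fileName]="log.file" [lineNumber]="log.line_number"
                        [message]="log.log_message"></log-file-entry>
      `
    })
    export class LogFileEntryDemoComponent {
      log = {
        logtime: '1506431103000',
        level: 'INFO',
        file: 'ambari-agent.log',
        line_number: '42',
        log_message: 'sample log message'
      };
    }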

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html
index 5145b76..9c6c336 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.html
@@ -21,7 +21,9 @@
   </div>
 </div>
 <time-histogram class="col-md-12" [data]="histogramData" [customOptions]="histogramOptions"></time-histogram>
-<dropdown-button class="pull-right" label="logs.columns" [options]="availableColumns | async" [isRightAlign]="true"
-                 isMultipleChoice="true" action="updateSelectedColumns"
-                 [additionalArgs]="logsTypeMapObject.fieldsModel"></dropdown-button>
+<dropdown-button *ngIf="!isServiceLogsFileView" class="pull-right" label="logs.columns"
+                 [options]="availableColumns | async" [isRightAlign]="true" [isMultipleChoice]="true"
+                 action="updateSelectedColumns" [additionalArgs]="logsTypeMapObject.fieldsModel"></dropdown-button>
 <logs-list [logs]="logs | async" [totalCount]="totalCount" [displayedColumns]="displayedColumns"></logs-list>
+<log-context *ngIf="isServiceLogContextView" [hostName]="activeLog.host_name" [componentName]="activeLog.component_name"
+             [id]="activeLog.id"></log-context>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.spec.ts
index 811c6e6..f3b28d1 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.spec.ts
@@ -30,6 +30,7 @@ import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-log
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
 import {HostsService, hosts} from '@app/services/storage/hosts.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {HttpClientService} from '@app/services/http-client.service';
 import {FilteringService} from '@app/services/filtering.service';
 import {UtilsService} from '@app/services/utils.service';
@@ -63,7 +64,8 @@ describe('LogsContainerComponent', () => {
           serviceLogs,
           serviceLogsFields,
           serviceLogsHistogramData,
-          hosts
+          hosts,
+          serviceLogsTruncated
         }),
         ...TranslationModules
       ],
@@ -82,6 +84,7 @@ describe('LogsContainerComponent', () => {
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
         HostsService,
+        ServiceLogsTruncatedService,
         FilteringService,
         UtilsService,
         LogsContainerService

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts
index b1fad17..fd3a58b 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-container/logs-container.component.ts
@@ -27,6 +27,7 @@ import {AppStateService} from '@app/services/storage/app-state.service';
 import {AuditLog} from '@app/models/audit-log.model';
 import {ServiceLog} from '@app/models/service-log.model';
 import {LogField} from '@app/models/log-field.model';
+import {ActiveServiceLogEntry} from '@app/classes/active-service-log-entry.class';
 
 @Component({
   selector: 'logs-container',
@@ -37,12 +38,13 @@ export class LogsContainerComponent implements OnInit {
 
   constructor(private serviceLogsHistogramStorage: ServiceLogsHistogramDataService, private appState: AppStateService, private filtering: FilteringService, private logsContainer: LogsContainerService) {
     serviceLogsHistogramStorage.getAll().subscribe(data => this.histogramData = this.logsContainer.getHistogramData(data));
+    appState.getParameter('isServiceLogContextView').subscribe((value: boolean) => this.isServiceLogContextView = value);
   }
 
   ngOnInit() {
     const fieldsModel = this.logsTypeMapObject.fieldsModel,
       logsModel = this.logsTypeMapObject.logsModel;
-    this.appState.getParameter(this.logsTypeMapObject.isSetFlag).subscribe(value => this.isLogsSet = value);
+    this.appState.getParameter(this.logsTypeMapObject.isSetFlag).subscribe((value: boolean) => this.isLogsSet = value);
     this.availableColumns = fieldsModel.getAll().map(fields => {
       return fields.filter(field => field.isAvailable).map(field => {
         return {
@@ -56,15 +58,13 @@ export class LogsContainerComponent implements OnInit {
       const availableFields = columns.filter(field => field.isAvailable),
         availableNames = availableFields.map(field => field.name);
       if (availableNames.length && !this.isLogsSet) {
-        this.logs = logsModel.getAll().map(logs => logs.map(log => {
-          let logObject = availableNames.reduce((obj, key) => Object.assign(obj, {
-            [key]: log[key]
-          }), {});
-          if (logObject.level) {
-            logObject.className = logObject.level.toLowerCase();
-          }
-          return logObject;
-        }));
+        this.logs = logsModel.getAll().map((logs: (AuditLog | ServiceLog)[]): (AuditLog | ServiceLog)[] => {
+          return logs.map((log: AuditLog | ServiceLog): AuditLog | ServiceLog => {
+            return availableNames.reduce((obj, key) => Object.assign(obj, {
+              [key]: log[key]
+            }), {});
+          });
+        });
         this.appState.setParameter(this.logsTypeMapObject.isSetFlag, true);
       }
       this.displayedColumns = columns.filter(column => column.isAvailable && column.isDisplayed);
@@ -112,4 +112,13 @@ export class LogsContainerComponent implements OnInit {
     };
   }
 
+  isServiceLogContextView: boolean = false;
+
+  get isServiceLogsFileView(): boolean {
+    return this.logsContainer.isServiceLogsFileView;
+  }
+
+  get activeLog(): ActiveServiceLogEntry | null {
+    return this.logsContainer.activeLog;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.html
index 2942b20..b27eb69 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.html
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.html
@@ -19,20 +19,20 @@
   <filter-dropdown [label]="filters.sorting.label" formControlName="sorting" [options]="filters.sorting.options"
                    [defaultLabel]="filters.sorting.defaultLabel" [isRightAlign]="true"></filter-dropdown>
 </form>
-<div class="col-md-12 text-center" *ngIf="logs && logs.length">
-  <div class="logs-header">
-    <div class="col-md-1">{{'logs.status' | translate}}</div>
-    <div class="col-md-11">{{'logs.details' | translate}}</div>
-  </div>
-</div>
 <div *ngFor="let log of logs; let i = index">
-  <div *ngIf="i === 0 || isDifferentDates(log.logtime, logs[i - 1].logtime)" class="col-md-12">
+  <div *ngIf="!isServiceLogsFileView && (i === 0 || isDifferentDates(log.logtime, logs[i - 1].logtime))" class="col-md-12">
     <div class="logs-header">{{log.logtime | amTz: timeZone | amDateFormat: dateFormat}}</div>
   </div>
-  <accordion-panel [toggleId]="'details-' + i" class="col-md-12">
+  <accordion-panel *ngIf="!isServiceLogsFileView" [toggleId]="'details-' + i" class="col-md-12">
     <ng-template>
-      <div *ngIf="isColumnDisplayed('level')" [ngClass]="'hexagon ' + log.className"></div>
-      <div *ngIf="isColumnDisplayed('level')" [ngClass]="'col-md-1 log-status ' + log.className">{{log.level}}</div>
+      <div *ngIf="isColumnDisplayed('level')" [ngClass]="'hexagon ' + log.level.toLowerCase()"></div>
+      <div class="col-md-1">
+        <dropdown-button iconClass="fa fa-ellipsis-h" [hideCaret]="true" [options]="logActions"
+                         [additionalArgs]="[log]"></dropdown-button>
+      </div>
+      <div *ngIf="isColumnDisplayed('level')" [ngClass]="'col-md-1 log-status ' + log.level.toLowerCase()">
+        {{log.level}}
+      </div>
       <div *ngIf="isColumnDisplayed('type') || isColumnDisplayed('logtime')" class="col-md-3">
         <div *ngIf="isColumnDisplayed('type')" class="log-type">{{log.type}}</div>
         <time *ngIf="isColumnDisplayed('logtime')" class="log-time">
@@ -41,9 +41,7 @@
       </div>
       <div class="col-md-6 log-content-wrapper">
         <div class="collapse log-actions" attr.id="details-{{i}}">
-          <span class="action-icon fa fa-search"></span>
-          <span class="action-icon fa fa-external-link"></span>
-          <span class="action-icon fa fa-crosshairs"></span>
+          <!-- TODO remove after restyling the table -->
         </div>
         <div class="log-content-inner-wrapper">
           <div class="log-content" *ngIf="isColumnDisplayed('log_message')"
@@ -58,8 +56,10 @@
       </div>
     </ng-template>
   </accordion-panel>
+  <log-file-entry *ngIf="isServiceLogsFileView" class="col-md-12" [time]="log.logtime" [level]="log.level"
+                  [fileName]="log.file" [lineNumber]="log.line_number" [message]="log.log_message"></log-file-entry>
 </div>
-<ul #contextmenu data-component="dropdown-list" class="dropdown-menu context-menu" [items]="contextMenuItems"
-    (selectedItemChange)="updateQuery($event)"></ul>
+<ul #contextmenu *ngIf="!isServiceLogsFileView" data-component="dropdown-list" class="dropdown-menu context-menu"
+    [items]="contextMenuItems" (selectedItemChange)="updateQuery($event)"></ul>
 <pagination class="col-md-12" *ngIf="logs && logs.length" [totalCount]="totalCount" [filtersForm]="filtersForm"
             [filterInstance]="filters.pageSize" [currentCount]="logs.length"></pagination>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.less
index 577043f..0fded67 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.less
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.less
@@ -30,7 +30,7 @@
 }
 
 .hexagon {
-  // TODO get rid of magic numbers, base on actual design
+  // TODO remove, since it's not a part of updated design
   left: -7.5px;
 
   &.fatal {
@@ -64,34 +64,7 @@
 
 .log-status {
   text-transform: uppercase;
-
-  &.fatal {
-    color: @fatal-color;
-  }
-
-  &.error {
-    color: @error-color;
-  }
-
-  &.warn {
-    color: @warning-color;
-  }
-
-  &.info {
-    color: @info-color;
-  }
-
-  &.debug {
-    color: @debug-color;
-  }
-
-  &.trace {
-    color: @trace-color;
-  }
-
-  &.unknown {
-    color: @unknown-color;
-  }
+  .log-colors;
 }
 
 .log-type {
@@ -117,11 +90,6 @@
   }
 
   .log-actions {
-    position: absolute;
-    right: 40px;
-    top: 0;
-    border: @input-border;
-
     &.collapsing + .log-content-inner-wrapper, &.collapse.in + .log-content-inner-wrapper {
       min-height: 6em;
       max-height: none;

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.spec.ts
index 8c67a13..8ee4ca3 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.spec.ts
@@ -24,6 +24,7 @@ import {MomentTimezoneModule} from 'angular-moment-timezone';
 import {AuditLogsService, auditLogs} from '@app/services/storage/audit-logs.service';
 import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service';
 import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
+import {AppStateService, appState} from '@app/services/storage/app-state.service';
 import {ClustersService, clusters} from '@app/services/storage/clusters.service';
 import {ComponentsService, components} from '@app/services/storage/components.service';
 import {HostsService, hosts} from '@app/services/storage/hosts.service';
@@ -53,6 +54,7 @@ describe('LogsListComponent', () => {
           auditLogs,
           serviceLogs,
           appSettings,
+          appState,
           clusters,
           components,
           hosts
@@ -69,6 +71,7 @@ describe('LogsListComponent', () => {
         AuditLogsService,
         ServiceLogsService,
         AppSettingsService,
+        AppStateService,
         ClustersService,
         ComponentsService,
         HostsService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.ts
index aeb55da..c94b967 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/logs-list/logs-list.component.ts
@@ -15,28 +15,32 @@
  * limitations under the License.
  */
 
-import {Component, OnInit, Input, ViewChild, ElementRef} from '@angular/core';
+import {Component, AfterViewInit, Input, ViewChild, ElementRef} from '@angular/core';
 import {FormGroup} from '@angular/forms';
 import 'rxjs/add/operator/map';
+import {AppStateService} from '@app/services/storage/app-state.service';
 import {FilteringService} from '@app/services/filtering.service';
 import {UtilsService} from '@app/services/utils.service';
+import {AuditLog} from '@app/models/audit-log.model';
+import {ServiceLog} from '@app/models/service-log.model';
 
 @Component({
   selector: 'logs-list',
   templateUrl: './logs-list.component.html',
   styleUrls: ['./logs-list.component.less']
 })
-export class LogsListComponent implements OnInit {
+export class LogsListComponent implements AfterViewInit {
 
-  constructor(private filtering: FilteringService, private utils: UtilsService) {
+  constructor(private filtering: FilteringService, private utils: UtilsService, private appState: AppStateService) {
+    appState.getParameter('isServiceLogsFileView').subscribe((value: boolean) => this.isServiceLogsFileView = value);
   }
 
-  ngOnInit() {
+  ngAfterViewInit() {
     this.contextMenuElement = this.contextMenu.nativeElement;
   }
 
   @Input()
-  logs: any[] = [];
+  logs: (AuditLog | ServiceLog)[] = [];
 
   @Input()
   totalCount: number = 0;
@@ -70,6 +74,24 @@ export class LogsListComponent implements OnInit {
     }
   ];
 
+  readonly logActions = [
+    {
+      label: 'logs.copy',
+      iconClass: 'fa fa-files-o',
+      action: 'copyLog'
+    },
+    {
+      label: 'logs.open',
+      iconClass: 'fa fa-external-link',
+      action: 'openLog'
+    },
+    {
+      label: 'logs.context',
+      iconClass: 'fa fa-crosshairs',
+      action: 'openContext'
+    }
+  ];
+
   readonly dateFormat: string = 'dddd, MMMM Do';
 
   readonly timeFormat: string = 'h:mm:ss A';
@@ -86,6 +108,8 @@ export class LogsListComponent implements OnInit {
     return this.filtering.filtersForm;
   }
 
+  isServiceLogsFileView: boolean = false;
+
   isDifferentDates(dateA, dateB): boolean {
     return this.utils.isDifferentDates(dateA, dateB, this.timeZone);
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.html
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.html b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.html
index 69b3887..7e3621a 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.html
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.html
@@ -20,5 +20,12 @@
   <span class="fa fa-spinner fa-spin"></span>
 </div>
 <login-form *ngIf="!isInitialLoading && !isAuthorized"></login-form>
+
+<!-- TODO implement tabs: Service Logs/Audit Logs/active file -->
+<div *ngIf="isServiceLogsFileView" class="col-md-12 logs-header">
+  {{activeLogHostName}} &gt;&gt; {{activeLogComponentName}}
+  <span class="fa fa-times close-icon" (click)="closeLog()"></span>
+</div>
+
 <filters-panel *ngIf="isAuthorized" class="row"></filters-panel>
 <logs-container *ngIf="isAuthorized" logsType="serviceLogs"></logs-container>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.less
index 9736628..f7dcc05 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.less
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.less
@@ -21,4 +21,8 @@
 :host {
   .full-size;
   overflow-x: hidden;
+
+  .close-icon {
+    .clickable-item;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.spec.ts
index 42fba68..bbbebdf 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.spec.ts
@@ -32,6 +32,14 @@ describe('MainContainerComponent', () => {
   let fixture: ComponentFixture<MainContainerComponent>;
 
   beforeEach(async(() => {
+    const httpClient = {
+      get: () => {
+        return {
+          subscribe: () => {
+          }
+        }
+      }
+    };
     TestBed.configureTestingModule({
       declarations: [MainContainerComponent],
       imports: [
@@ -47,7 +55,10 @@ describe('MainContainerComponent', () => {
         AppStateService,
         AuditLogsFieldsService,
         ServiceLogsFieldsService,
-        HttpClientService
+        {
+          provide: HttpClientService,
+          useValue: httpClient
+        }
       ]
     })
     .compileComponents();
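
The httpClient stub above returns a bare object whose subscribe is a no-op, which is enough to keep MainContainerComponent's constructor from failing. A sketch of an alternative stub built on a real RxJS Observable (assuming the RxJS 5 import paths used elsewhere in this commit; the empty json() payload is an assumption):

    import {Observable} from 'rxjs/Observable';
    import 'rxjs/add/observable/of';

    const httpClientStub = {
      // Emits a minimal Response-like object so tests can also assert on
      // what loadColumnsNames() derives from it.
      get: () => Observable.of({
        json: () => ({})
      })
    };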

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.ts
index 53d58cf..32fe1cf 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/main-container/main-container.component.ts
@@ -23,6 +23,7 @@ import {AuditLogsFieldsService} from '@app/services/storage/audit-logs-fields.se
 import {ServiceLogsFieldsService} from '@app/services/storage/service-logs-fields.service';
 import {AuditLogField} from '@app/models/audit-log-field.model';
 import {ServiceLogField} from '@app/models/service-log-field.model';
+import {ActiveServiceLogEntry} from '@app/classes/active-service-log-entry.class';
 
 @Component({
   selector: 'main-container',
@@ -33,8 +34,18 @@ export class MainContainerComponent {
 
   constructor(private httpClient: HttpClientService, private appState: AppStateService, private auditLogsFieldsStorage: AuditLogsFieldsService, private serviceLogsFieldsStorage: ServiceLogsFieldsService) {
     this.loadColumnsNames();
-    appState.getParameter('isAuthorized').subscribe(value => this.isAuthorized = value);
-    appState.getParameter('isInitialLoading').subscribe(value => this.isInitialLoading = value);
+    appState.getParameter('isAuthorized').subscribe((value: boolean) => this.isAuthorized = value);
+    appState.getParameter('isInitialLoading').subscribe((value: boolean) => this.isInitialLoading = value);
+    appState.getParameter('isServiceLogsFileView').subscribe((value: boolean) => this.isServiceLogsFileView = value);
+    appState.getParameter('activeLog').subscribe((value: ActiveServiceLogEntry | null) => {
+      if (value) {
+        this.activeLogHostName = value.host_name;
+        this.activeLogComponentName = value.component_name;
+      } else {
+        this.activeLogHostName = '';
+        this.activeLogComponentName = '';
+      }
+    });
   }
 
   @ContentChild(TemplateRef)
@@ -44,6 +55,12 @@ export class MainContainerComponent {
 
   isInitialLoading: boolean = false;
 
+  isServiceLogsFileView: boolean = false;
+
+  activeLogHostName: string = '';
+
+  activeLogComponentName: string = '';
+
   private loadColumnsNames(): void {
     this.httpClient.get('serviceLogsFields').subscribe(response => {
       const jsonResponse = response.json();
@@ -63,4 +80,11 @@ export class MainContainerComponent {
     return Object.keys(keysObject).map(key => new fieldClass(key));
   }
 
+  closeLog(): void {
+    this.appState.setParameters({
+      isServiceLogsFileView: false,
+      activeLog: null
+    });
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts
index f92961e..5414f4f 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/menu-button/menu-button.component.spec.ts
@@ -30,6 +30,7 @@ import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/aud
 import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service';
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {ComponentActionsService} from '@app/services/component-actions.service';
 import {FilteringService} from '@app/services/filtering.service';
 import {HttpClientService} from '@app/services/http-client.service';
@@ -55,7 +56,8 @@ describe('MenuButtonComponent', () => {
           auditLogsFields,
           serviceLogs,
           serviceLogsFields,
-          serviceLogsHistogramData
+          serviceLogsHistogramData,
+          serviceLogsTruncated
         }),
         ...TranslationModules
       ],
@@ -70,6 +72,7 @@ describe('MenuButtonComponent', () => {
         ServiceLogsService,
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
+        ServiceLogsTruncatedService,
         ComponentActionsService,
         FilteringService,
         HttpClientService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts
index 7d1e907..7105624 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/timezone-picker/timezone-picker.component.spec.ts
@@ -29,6 +29,7 @@ import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/aud
 import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-logs.service';
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {ComponentActionsService} from '@app/services/component-actions.service';
 import {FilteringService} from '@app/services/filtering.service';
 import {HttpClientService} from '@app/services/http-client.service';
@@ -60,7 +61,8 @@ describe('TimeZonePickerComponent', () => {
           auditLogsFields,
           serviceLogs,
           serviceLogsFields,
-          serviceLogsHistogramData
+          serviceLogsHistogramData,
+          serviceLogsTruncated
         }),
         ...TranslationModules
       ],
@@ -75,6 +77,7 @@ describe('TimeZonePickerComponent', () => {
         ServiceLogsService,
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
+        ServiceLogsTruncatedService,
         ComponentActionsService,
         FilteringService,
         HttpClientService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less b/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
index 88b0c91..2dc6278 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/components/variables.less
@@ -138,3 +138,33 @@
     background-color: #F5F5F5;
   }
 }
+
+.log-colors {
+  &.fatal {
+    color: @fatal-color;
+  }
+
+  &.error {
+    color: @error-color;
+  }
+
+  &.warn {
+    color: @warning-color;
+  }
+
+  &.info {
+    color: @info-color;
+  }
+
+  &.debug {
+    color: @debug-color;
+  }
+
+  &.trace {
+    color: @trace-color;
+  }
+
+  &.unknown {
+    color: @unknown-color;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/models/app-state.model.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/models/app-state.model.ts b/ambari-logsearch/ambari-logsearch-web/src/app/models/app-state.model.ts
index 28ae763..267bf15 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/models/app-state.model.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/models/app-state.model.ts
@@ -16,6 +16,8 @@
  * limitations under the License.
  */
 
+import {ActiveServiceLogEntry} from '@app/classes/active-service-log-entry.class';
+
 export interface AppState {
   isAuthorized: boolean;
   isInitialLoading: boolean;
@@ -23,6 +25,9 @@ export interface AppState {
   isAuditLogsSet: boolean;
   isServiceLogsSet: boolean;
   activeLogsType?: string;
+  isServiceLogsFileView: boolean;
+  isServiceLogContextView: boolean;
+  activeLog: ActiveServiceLogEntry | null;
 }
 
 export const initialState: AppState = {
@@ -31,5 +36,8 @@ export const initialState: AppState = {
   isLoginInProgress: false,
   isAuditLogsSet: false,
   isServiceLogsSet: false,
-  activeLogsType: 'serviceLogs' // TODO implement setting the parameter depending on user's navigation
+  activeLogsType: 'serviceLogs', // TODO implement setting the parameter depending on user's navigation
+  isServiceLogsFileView: false,
+  isServiceLogContextView: false,
+  activeLog: null
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/models/bar-graph.model.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/models/bar-graph.model.ts b/ambari-logsearch/ambari-logsearch-web/src/app/models/bar-graph.model.ts
index a197bf5..6c9a049 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/models/bar-graph.model.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/models/bar-graph.model.ts
@@ -19,6 +19,6 @@
 import {CommonEntry} from '@app/models/common-entry.model';
 
 export interface BarGraph {
-  dataCount: CommonEntry[],
+  dataCount: CommonEntry[];
   name: string;
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/models/graph.model.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/models/graph.model.ts b/ambari-logsearch/ambari-logsearch-web/src/app/models/graph.model.ts
index 04966b2..be31f19 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/models/graph.model.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/models/graph.model.ts
@@ -19,5 +19,5 @@
 export interface Graph {
   name: string;
   count: string;
-  dataList?: Graph[]
+  dataList?: Graph[];
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/models/log.model.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/models/log.model.ts b/ambari-logsearch/ambari-logsearch-web/src/app/models/log.model.ts
index 188bbd2..c598e41 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/models/log.model.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/models/log.model.ts
@@ -26,6 +26,7 @@ export interface Log {
   case_id?: string;
   log_message: string;
   logfile_line_number: number;
+  line_number?: number;
   message_md5: string;
   cluster: string;
   event_count: number;

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/models/store.model.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/models/store.model.ts b/ambari-logsearch/ambari-logsearch-web/src/app/models/store.model.ts
index 31d52b3..a6a084f 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/models/store.model.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/models/store.model.ts
@@ -32,6 +32,7 @@ import {ServiceLogField} from '@app/models/service-log-field.model';
 
 export const storeActions = {
   'ARRAY.ADD': 'ADD',
+  'ARRAY.ADD.START': 'ADD_TO_START',
   'ARRAY.DELETE.PRIMITIVE': 'DELETE_PRIMITIVE',
   'ARRAY.DELETE.OBJECT': 'DELETE_OBJECT',
   'ARRAY.CLEAR': 'CLEAR',
@@ -46,6 +47,7 @@ export interface AppStore {
   auditLogs: AuditLog[];
   serviceLogs: ServiceLog[];
   serviceLogsHistogramData: BarGraph[];
+  serviceLogsTruncated: ServiceLog[];
   graphs: Graph[];
   hosts: Node[];
   userConfigs: UserConfig[];
@@ -86,6 +88,13 @@ export class CollectionModelService extends ModelService {
     });
   }
 
+  addInstancesToStart(instances: any[]): void {
+    this.store.dispatch({
+      type: `${storeActions['ARRAY.ADD.START']}_${this.modelName}`,
+      payload: instances
+    });
+  }
+
   deleteObjectInstance(instance: any): void {
     this.store.dispatch({
       type: `${storeActions['ARRAY.DELETE.OBJECT']}_${this.modelName}`,
@@ -143,6 +152,8 @@ export function getCollectionReducer(modelName: string, defaultState: any = []):
     switch (action.type) {
       case `${storeActions['ARRAY.ADD']}_${modelName}`:
         return [...state, ...action.payload];
+      case `${storeActions['ARRAY.ADD.START']}_${modelName}`:
+        return [...action.payload, ...state];
       case `${storeActions['ARRAY.DELETE.OBJECT']}_${modelName}`:
         return state.filter(instance => instance.id !== action.payload.id);
       case `${storeActions['ARRAY.DELETE.PRIMITIVE']}_${modelName}`:
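
A small worked example of the new ADD_TO_START case (illustrative only): the reducer spreads the payload in front of the existing state, which is how context entries fetched with direction 'before' end up above the current entry:

    const reducer = getCollectionReducer('serviceLogsTruncated');
    let state = [{id: 'b'}, {id: 'c'}];
    state = reducer(state, {
      type: 'ADD_TO_START_serviceLogsTruncated',
      payload: [{id: 'a'}]
    });
    // state is now [{id: 'a'}, {id: 'b'}, {id: 'c'}]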

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.spec.ts
index e737155..3dbd992 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.spec.ts
@@ -19,6 +19,7 @@
 import {TestBed, inject} from '@angular/core/testing';
 import {StoreModule} from '@ngrx/store';
 import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
+import {AppStateService, appState} from '@app/services/storage/app-state.service';
 import {ClustersService, clusters} from '@app/services/storage/clusters.service';
 import {ComponentsService, components} from '@app/services/storage/components.service';
 import {HostsService, hosts} from '@app/services/storage/hosts.service';
@@ -27,6 +28,7 @@ import {ServiceLogsService, serviceLogs} from '@app/services/storage/service-log
 import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/audit-logs-fields.service';
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {FilteringService} from '@app/services/filtering.service';
 import {HttpClientService} from '@app/services/http-client.service';
 import {LogsContainerService} from '@app/services/logs-container.service';
@@ -48,6 +50,7 @@ describe('ComponentActionsService', () => {
       imports: [
         StoreModule.provideStore({
           appSettings,
+          appState,
           clusters,
           components,
           hosts,
@@ -55,12 +58,14 @@ describe('ComponentActionsService', () => {
           serviceLogs,
           auditLogsFields,
           serviceLogsFields,
-          serviceLogsHistogramData
+          serviceLogsHistogramData,
+          serviceLogsTruncated
         })
       ],
       providers: [
         ComponentActionsService,
         AppSettingsService,
+        AppStateService,
         ClustersService,
         ComponentsService,
         HostsService,
@@ -69,6 +74,7 @@ describe('ComponentActionsService', () => {
         AuditLogsFieldsService,
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
+        ServiceLogsTruncatedService,
         FilteringService,
         {
           provide: HttpClientService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.ts
index dba0f8f..b3ff0b0 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-actions.service.ts
@@ -18,14 +18,16 @@
 
 import {Injectable} from '@angular/core';
 import {AppSettingsService} from '@app/services/storage/app-settings.service';
+import {AppStateService} from '@app/services/storage/app-state.service';
 import {CollectionModelService} from '@app/models/store.model';
 import {FilteringService} from '@app/services/filtering.service';
 import {LogsContainerService} from '@app/services/logs-container.service';
+import {ServiceLog} from '@app/models/service-log.model';
 
 @Injectable()
 export class ComponentActionsService {
 
-  constructor(private appSettings: AppSettingsService, private filtering: FilteringService, private logsContainer: LogsContainerService) {
+  constructor(private appSettings: AppSettingsService, private appState: AppStateService, private filtering: FilteringService, private logsContainer: LogsContainerService) {
   }
 
   //TODO implement actions
@@ -44,6 +46,52 @@ export class ComponentActionsService {
   openHistory() {
   }
 
+  copyLog(log: ServiceLog): void {
+    if (document.queryCommandSupported('copy')) {
+      const text = log.log_message,
+        node = document.createElement('textarea');
+      node.value = text;
+      Object.assign(node.style, {
+        position: 'fixed',
+        top: '0',
+        left: '0',
+        width: '1px',
+        height: '1px',
+        border: 'none',
+        outline: 'none',
+        boxShadow: 'none',
+        backgroundColor: 'transparent',
+        padding: '0'
+      });
+      document.body.appendChild(node);
+      node.select();
+      if (document.queryCommandEnabled('copy')) {
+        document.execCommand('copy');
+      } else {
+        // TODO open failed alert
+      }
+      // TODO success alert
+      document.body.removeChild(node);
+    } else {
+      // TODO failed alert
+    }
+  }
+
+  openLog(log: ServiceLog): void {
+    this.appState.setParameters({
+      isServiceLogsFileView: true,
+      activeLog: {
+        id: log.id,
+        host_name: log.host,
+        component_name: log.type
+      }
+    });
+  }
+
+  openContext(log: ServiceLog): void {
+    this.logsContainer.loadLogContext(log.id, log.host, log.type);
+  }
+
   startCapture(): void {
     this.filtering.startCaptureTimer();
   }
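
copyLog above uses the hidden-textarea/execCommand technique, which was the portable option when this was written. For comparison, a sketch of the same action on the asynchronous Clipboard API (an assumption, not part of the commit: it requires a secure HTTPS context where navigator.clipboard exists, and the cast works around missing typings of that era; alert handling stays a TODO, matching the commit):

    function copyLogMessage(message: string): Promise<void> {
      const clipboard = (navigator as any).clipboard;
      if (!clipboard) {
        // TODO failed alert
        return Promise.reject(new Error('Clipboard API unavailable'));
      }
      // TODO success/failed alerts when the promise settles
      return clipboard.writeText(message);
    }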


[45/50] [abbrv] ambari git commit: AMBARI-22101. After enabling NameNode HA, Yarn and MapReduce2 show stale configs. (Ishan via Jaimin)

Posted by rl...@apache.org.
AMBARI-22101. After enabling NameNode HA, Yarn and MapReduce2 show stale configs. (Ishan via Jaimin)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/29ea0ffb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/29ea0ffb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/29ea0ffb

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 29ea0ffb9dca3330b016c6abdb2812151641cd7e
Parents: ff9ae55
Author: Jaimin Jetly <ja...@hortonworks.com>
Authored: Sun Oct 1 23:10:52 2017 -0700
Committer: Jaimin Jetly <ja...@hortonworks.com>
Committed: Sun Oct 1 23:10:52 2017 -0700

----------------------------------------------------------------------
 .../admin/highAvailability/journalNode/step4_controller.js     | 6 +++++-
 .../main/admin/highAvailability/nameNode/step5_controller.js   | 6 +++++-
 2 files changed, 10 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/29ea0ffb/ambari-web/app/controllers/main/admin/highAvailability/journalNode/step4_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/journalNode/step4_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/journalNode/step4_controller.js
index 5bcf78a..fad2b3d 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/journalNode/step4_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/journalNode/step4_controller.js
@@ -23,7 +23,7 @@ App.ManageJournalNodeWizardStep4Controller = App.ManageJournalNodeProgressPageCo
   clusterDeployState: 'JOURNALNODE_MANAGEMENT',
   tasksMessagesPrefix: 'admin.manageJournalNode.wizard.step',
 
-  commands: ['stopStandbyNameNode', 'stopServices', 'installJournalNodes', 'deleteJournalNodes', 'startJournalNodes', 'reconfigureHDFS'],
+  commands: ['stopStandbyNameNode', 'stopAllServices', 'installJournalNodes', 'deleteJournalNodes', 'startJournalNodes', 'reconfigureHDFS'],
 
   hdfsSiteTag: "",
 
@@ -33,6 +33,10 @@ App.ManageJournalNodeWizardStep4Controller = App.ManageJournalNodeProgressPageCo
     this.updateComponent('NAMENODE', hostName, 'HDFS',  'INSTALLED');
   },
 
+  stopAllServices: function () {
+    this.stopServices([], true, true);
+  },
+
   installJournalNodes: function () {
     var hostNames = App.router.get('manageJournalNodeWizardController').getJournalNodesToAdd();
     if (hostNames && hostNames.length > 0) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/29ea0ffb/ambari-web/app/controllers/main/admin/highAvailability/nameNode/step5_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/nameNode/step5_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/nameNode/step5_controller.js
index 8e035e8..f1f81d1 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/nameNode/step5_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/nameNode/step5_controller.js
@@ -22,11 +22,15 @@ App.HighAvailabilityWizardStep5Controller = App.HighAvailabilityProgressPageCont
 
   name:"highAvailabilityWizardStep5Controller",
 
-  commands: ['stopServices', 'installNameNode', 'installJournalNodes', 'reconfigureHDFS', 'startJournalNodes', 'disableSNameNode'],
+  commands: ['stopAllServices', 'installNameNode', 'installJournalNodes', 'reconfigureHDFS', 'startJournalNodes', 'disableSNameNode'],
 
   hdfsSiteTag : "",
   coreSiteTag : "",
 
+  stopAllServices: function () {
+    this.stopServices([], true, true);
+  },
+
   installNameNode: function () {
     var hostName = this.get('content.masterComponentHosts').filterProperty('component', 'NAMENODE').findProperty('isInstalled', false).hostName;
     this.createInstallComponentTask('NAMENODE', hostName, "HDFS");


[18/50] [abbrv] ambari git commit: AMBARI-22075. Use symlinks for Solr Data Manager (mgergely)

Posted by rl...@apache.org.
AMBARI-22075. Use symlinks for Solr Data Manager (mgergely)

Change-Id: Ibe5a0df15f3a8fe378734f5899a8f17bba6127ca


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9b259142
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9b259142
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9b259142

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 9b259142f47d1ebe55e26fbfdb0494cb825f8b2c
Parents: 4da28df
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Sep 27 19:56:09 2017 +0200
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Wed Sep 27 21:18:03 2017 +0200

----------------------------------------------------------------------
 .../src/main/package/deb/solr-client/postinst                  | 6 +++++-
 .../src/main/package/rpm/solr-client/postinstall.sh            | 6 +++++-
 ambari-infra/ambari-infra-solr-client/build.xml                | 3 +++
 .../src/main/python/solrDataManager.py                         | 0
 4 files changed, 13 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/9b259142/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
index c339cd0..ccc377b 100644
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
@@ -20,5 +20,9 @@ SOLR_CLOUD_CLI_SOURCE="/usr/lib/ambari-infra-solr-client/solrCloudCli.sh"
 SOLR_INDEX_TOOL_LINK_NAME="/usr/bin/infra-lucene-index-tool"
 SOLR_INDEX_TOOL_SOURCE="/usr/lib/ambari-infra-solr-client/solrIndexHelper.sh"
 
+SOLR_DATA_MANAGER_LINK_NAME="/usr/bin/infra-solr-data-manager"
+SOLR_DATA_MANAGER_SOURCE="/usr/lib/ambari-infra-solr-client/solrDataManager.py"
+
 rm -f $SOLR_CLOUD_CLI_LINK_NAME ; ln -s $SOLR_CLOUD_CLI_SOURCE $SOLR_CLOUD_CLI_LINK_NAME
-rm -f $SOLR_INDEX_TOOL_LINK_NAME ; ln -s $SOLR_INDEX_TOOL_SOURCE $SOLR_INDEX_TOOL_LINK_NAME
\ No newline at end of file
+rm -f $SOLR_INDEX_TOOL_LINK_NAME ; ln -s $SOLR_INDEX_TOOL_SOURCE $SOLR_INDEX_TOOL_LINK_NAME
+rm -f $SOLR_DATA_MANAGER_LINK_NAME ; ln -s $SOLR_DATA_MANAGER_SOURCE $SOLR_DATA_MANAGER_LINK_NAME
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9b259142/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh
index c339cd0..ccc377b 100644
--- a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh
@@ -20,5 +20,9 @@ SOLR_CLOUD_CLI_SOURCE="/usr/lib/ambari-infra-solr-client/solrCloudCli.sh"
 SOLR_INDEX_TOOL_LINK_NAME="/usr/bin/infra-lucene-index-tool"
 SOLR_INDEX_TOOL_SOURCE="/usr/lib/ambari-infra-solr-client/solrIndexHelper.sh"
 
+SOLR_DATA_MANAGER_LINK_NAME="/usr/bin/infra-solr-data-manager"
+SOLR_DATA_MANAGER_SOURCE="/usr/lib/ambari-infra-solr-client/solrDataManager.py"
+
 rm -f $SOLR_CLOUD_CLI_LINK_NAME ; ln -s $SOLR_CLOUD_CLI_SOURCE $SOLR_CLOUD_CLI_LINK_NAME
-rm -f $SOLR_INDEX_TOOL_LINK_NAME ; ln -s $SOLR_INDEX_TOOL_SOURCE $SOLR_INDEX_TOOL_LINK_NAME
\ No newline at end of file
+rm -f $SOLR_INDEX_TOOL_LINK_NAME ; ln -s $SOLR_INDEX_TOOL_SOURCE $SOLR_INDEX_TOOL_LINK_NAME
+rm -f $SOLR_DATA_MANAGER_LINK_NAME ; ln -s $SOLR_DATA_MANAGER_SOURCE $SOLR_DATA_MANAGER_LINK_NAME
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/9b259142/ambari-infra/ambari-infra-solr-client/build.xml
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-client/build.xml b/ambari-infra/ambari-infra-solr-client/build.xml
index 25ff0cb..d9d3a6c 100644
--- a/ambari-infra/ambari-infra-solr-client/build.xml
+++ b/ambari-infra/ambari-infra-solr-client/build.xml
@@ -42,12 +42,15 @@
       <fileset file="src/main/resources/log4j.properties"/>
     </copy>
     <chmod file="target/package/*.sh" perm="755"/>
+    <chmod file="target/package/*.py" perm="755"/>
     <tar compression="gzip" destfile="target/ambari-infra-solr-client.tar.gz">
       <tarfileset mode="755" dir="target/package">
         <include name="*.sh"/>
+        <include name="*.py"/>
       </tarfileset>
       <tarfileset mode="664" dir="target/package">
         <exclude name="*.sh"/>
+        <exclude name="*.py"/>
       </tarfileset>
     </tar>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/9b259142/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py b/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
old mode 100644
new mode 100755


[36/50] [abbrv] ambari git commit: AMBARI-22083 - Wrong Hadoop Home Directory Is Being Picked Up on MAINT/PATCH Upgraded Clusters (jonathanhurley)

Posted by rl...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
index f0a89a9..fbe5403 100644
--- a/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
+++ b/ambari-server/src/test/python/stacks/2.3/SPARK/test_spark_thrift_server.py
@@ -96,7 +96,7 @@ class TestSparkThriftServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/spark',
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.3.2.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = {u'a': u'b'},
@@ -104,7 +104,7 @@ class TestSparkThriftServer(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         owner = 'spark',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.3.2.0-2067/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         dfs_type = '',
@@ -113,7 +113,7 @@ class TestSparkThriftServer(RMFTestCase):
     self.assertResourceCalled('HdfsResource', None,
         immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.3.2.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_site = {u'a': u'b'},
@@ -122,7 +122,7 @@ class TestSparkThriftServer(RMFTestCase):
         user = 'hdfs',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         dfs_type = '',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.3.2.0-2067/hadoop/conf',
     )
     self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf',
         owner = 'spark',

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py b/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
index 7ebe2f5..34ca102 100644
--- a/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
+++ b/ambari-server/src/test/python/stacks/2.5/RANGER_KMS/test_kms_server.py
@@ -171,8 +171,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = False,
                         keytab = None,
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = None,
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'
@@ -189,8 +189,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = False,
                         keytab = None,
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = None,
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'
@@ -202,8 +202,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = False,
                         keytab = None,
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = None,
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'
@@ -611,8 +611,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = True,
                         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = 'hdfs-cl1@EXAMPLE.COM',
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'
@@ -629,8 +629,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = True,
                         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = 'hdfs-cl1@EXAMPLE.COM',
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'
@@ -642,8 +642,8 @@ class TestRangerKMS(RMFTestCase):
                         security_enabled = True,
                         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
                         kinit_path_local = '/usr/bin/kinit',
-                        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                        hadoop_bin_dir = '/usr/hdp/2.5.0.0-777/hadoop/bin',
+                        hadoop_conf_dir = '/usr/hdp/2.5.0.0-777/hadoop/conf',
                         principal_name = 'hdfs-cl1@EXAMPLE.COM',
                         hdfs_site = self.getConfig()['configurations']['hdfs-site'],
                         default_fs = 'hdfs://c6401.ambari.apache.org:8020'

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py b/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
index 3c7e4a2..4f1fb17 100644
--- a/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
+++ b/ambari-server/src/test/python/stacks/2.5/SPARK/test_spark_livy.py
@@ -56,7 +56,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', '/user/livy',
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -64,7 +64,7 @@ class TestSparkClient(RMFTestCase):
                                   principal_name = UnknownConfigurationMock(),
                                   user = 'hdfs',
                                   owner = 'livy',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   type = 'directory',
                                   action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
@@ -73,7 +73,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', None,
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -82,12 +82,12 @@ class TestSparkClient(RMFTestCase):
                                   user = 'hdfs',
                                   action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   )
         self.assertResourceCalled('HdfsResource', '/livy-recovery',
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -95,7 +95,7 @@ class TestSparkClient(RMFTestCase):
                                   principal_name = UnknownConfigurationMock(),
                                   user = 'hdfs',
                                   owner = 'livy',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   type = 'directory',
                                   action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
@@ -104,7 +104,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', None,
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -113,7 +113,7 @@ class TestSparkClient(RMFTestCase):
                                   user = 'hdfs',
                                   action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   )
         self.assertResourceCalled('File', '/usr/hdp/current/livy-server/conf/livy-env.sh',
                                   content = InlineTemplate(self.getConfig()['configurations']['livy-env']['content']),

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py b/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py
index 8df053c..7d4217e 100644
--- a/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py
+++ b/ambari-server/src/test/python/stacks/2.5/ZEPPELIN/test_zeppelin_060.py
@@ -238,7 +238,7 @@ class TestZeppelin060(RMFTestCase):
     )
     self.assertResourceCalled('HdfsResource', '/user/zeppelin',
         security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
@@ -248,14 +248,14 @@ class TestZeppelin060(RMFTestCase):
         user = 'hdfs',
         owner = 'zeppelin',
         recursive_chown = True,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'],
         recursive_chmod = True,
     )
     self.assertResourceCalled('HdfsResource', '/user/zeppelin/test',
         security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
@@ -265,14 +265,14 @@ class TestZeppelin060(RMFTestCase):
         user = 'hdfs',
         owner = 'zeppelin',
         recursive_chown = True,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'],
         recursive_chmod = True,
     )
     self.assertResourceCalled('HdfsResource', '/apps/zeppelin',
         security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
@@ -282,14 +282,14 @@ class TestZeppelin060(RMFTestCase):
         user = 'hdfs',
         owner = 'zeppelin',
         recursive_chown = True,
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'],
         recursive_chmod = True,
     )
     self.assertResourceCalled('HdfsResource', '/apps/zeppelin/tmp',
         security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         source = '/tmp',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -301,14 +301,14 @@ class TestZeppelin060(RMFTestCase):
         user = 'hdfs',
         owner = 'zeppelin',
         group = 'zeppelin',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
         type = 'file',
         action = ['create_on_execute'],
         mode = 0444,
     )
     self.assertResourceCalled('HdfsResource', None,
         security_enabled = True,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.2.1.0-2067/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
         hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
@@ -317,7 +317,7 @@ class TestZeppelin060(RMFTestCase):
         principal_name = UnknownConfigurationMock(),
         user = 'hdfs',
         action = ['execute'],
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.2.1.0-2067/hadoop/conf',
     )
     self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
         content=interpreter_json_generated.template_after_base,

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
index 103c86b..f7f054a 100644
--- a/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
+++ b/ambari-server/src/test/python/stacks/2.5/configs/ranger-kms-secured.json
@@ -214,7 +214,7 @@
         "not_managed_hdfs_path_list": "[\"/tmp\"]",
         "ambari_db_rca_url": "jdbc:postgresql://c6401.ambari.apache.org/ambarirca",
         "java_version": "8",
-        "repo_info": "[{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-801\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.5\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-801\",\"baseSaved\":true},{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.21\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"baseSaved\":true}]",
+        "repo_info": "[{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-777\",\"osType\":\"redhat6\",\"repoId\":\"HDP-2.5\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/updates/2.5.0.0\",\"latestBaseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP/centos6/2.x/BUILDS/2.5.0.0-777\",\"baseSaved\":true},{\"baseUrl\":\"http://s3.amazonaws.com/dev.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"osType\":\"redhat6\",\"repoId\":\"HDP-UTILS-1.1.0.21\",\"repoName\":\"HDP-UTILS\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"latestBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.21/repos/centos6\",\"baseSaved\":true}]",
         "package_list": "[{\"name\":\"ranger_${stack_version}-kms\",\"condition\":\"\",\"skipUpgrade\":false}]",
         "db_name": "ambari",
         "group_list": "[\"kms\",\"ranger\",\"hadoop\",\"users\"]",
@@ -230,7 +230,7 @@
         "service_package_folder": "common-services/RANGER_KMS/0.5.0.2.3/package",
         "script": "scripts/kms_server.py",
         "hooks_folder": "HDP/2.0.6/hooks",
-        "version": "2.5.0.0-801",
+        "version": "2.5.0.0-777",
         "max_duration_for_retries": "0",
         "command_retry_enabled": "false",
         "command_timeout": "600",
@@ -605,7 +605,7 @@
             "dfs.web.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
             "fs.permissions.umask-mode": "022",
             "dfs.namenode.stale.datanode.interval": "30000",
-            "dfs.datanode.ipc.address": "0.0.0.0:8010",
+            "dfs.datanode.ipc.address": "0.0.0.0:7770",
             "dfs.datanode.failed.volumes.tolerated": "0",
             "dfs.datanode.data.dir": "/grid/0/hadoop/hdfs/data",
             "dfs.namenode.http-address": "c6401.ambari.apache.org:50070",

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py b/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
index ca689b0..d8b9fe2 100644
--- a/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
+++ b/ambari-server/src/test/python/stacks/2.6/DRUID/test_druid.py
@@ -597,7 +597,7 @@ class TestDruid(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/druid',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab = UnknownConfigurationMock(),
                               default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_site = {u'a': u'b'},
@@ -606,7 +606,7 @@ class TestDruid(RMFTestCase):
                               user = 'hdfs',
                               owner = 'druid',
                               group='hadoop',
-                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type = 'directory',
                               action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               dfs_type = '',
@@ -617,7 +617,7 @@ class TestDruid(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/druid/data',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab = UnknownConfigurationMock(),
                               default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_site = {u'a': u'b'},
@@ -625,7 +625,7 @@ class TestDruid(RMFTestCase):
                               principal_name = 'missing_principal',
                               user = 'hdfs',
                               owner = 'druid',
-                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type = 'directory',
                               action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               dfs_type = '',
@@ -634,7 +634,7 @@ class TestDruid(RMFTestCase):
                               )
     self.assertResourceCalled('HdfsResource', '/tmp',
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         dfs_type = '',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -644,7 +644,7 @@ class TestDruid(RMFTestCase):
         principal_name = 'missing_principal',
         user = 'hdfs',
         owner = 'hdfs',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'],
         immutable_paths = [u'/apps/hive/warehouse',
@@ -657,7 +657,7 @@ class TestDruid(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/tmp/druid-indexing',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab = UnknownConfigurationMock(),
                               default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_site = {u'a': u'b'},
@@ -666,7 +666,7 @@ class TestDruid(RMFTestCase):
                               user = 'hdfs',
                               owner = 'druid',
                               group='hadoop',
-                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type = 'directory',
                               action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               dfs_type = '',
@@ -676,7 +676,7 @@ class TestDruid(RMFTestCase):
     self.assertResourceCalled('HdfsResource', '/user/druid/logs',
                               immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                               security_enabled = False,
-                              hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               keytab = UnknownConfigurationMock(),
                               default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_site = {u'a': u'b'},
@@ -685,7 +685,7 @@ class TestDruid(RMFTestCase):
                               user = 'hdfs',
                               owner = 'druid',
                               group='hadoop',
-                              hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type = 'directory',
                               action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               dfs_type = '',

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py b/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
index c370bbc..60e7fd2 100644
--- a/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
+++ b/ambari-server/src/test/python/stacks/2.6/SPARK2/test_spark_livy2.py
@@ -56,7 +56,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', '/user/livy',
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -64,7 +64,7 @@ class TestSparkClient(RMFTestCase):
                                   principal_name = UnknownConfigurationMock(),
                                   user = 'hdfs',
                                   owner = 'livy',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   type = 'directory',
                                   action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
@@ -73,7 +73,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', None,
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -82,12 +82,12 @@ class TestSparkClient(RMFTestCase):
                                   user = 'hdfs',
                                   action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   )
         self.assertResourceCalled('HdfsResource', '/livy2-recovery',
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -95,7 +95,7 @@ class TestSparkClient(RMFTestCase):
                                   principal_name = UnknownConfigurationMock(),
                                   user = 'hdfs',
                                   owner = 'livy',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   type = 'directory',
                                   action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
@@ -104,7 +104,7 @@ class TestSparkClient(RMFTestCase):
         self.assertResourceCalled('HdfsResource', None,
                                   immutable_paths = self.DEFAULT_IMMUTABLE_PATHS,
                                   security_enabled = False,
-                                  hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                                  hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
                                   keytab = UnknownConfigurationMock(),
                                   default_fs = 'hdfs://c6401.ambari.apache.org:8020',
                                   hdfs_site = {u'a': u'b'},
@@ -113,7 +113,7 @@ class TestSparkClient(RMFTestCase):
                                   user = 'hdfs',
                                   action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                                   dfs_type = '',
-                                  hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+                                  hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
                                   )
         self.assertResourceCalled('File', '/usr/hdp/current/livy2-server/conf/livy-env.sh',
                                   content = InlineTemplate(self.getConfig()['configurations']['livy2-env']['content']),

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index e0e4259..e8ef262 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -222,7 +222,7 @@ class TestZeppelin070(RMFTestCase):
                               sudo=True,
                               )
     self.assertResourceCalled('HdfsResource', '/user/zeppelin',
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               default_fs=u'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hdfs_site={u'a': u'b'},
@@ -233,13 +233,13 @@ class TestZeppelin070(RMFTestCase):
                               principal_name=UnknownConfigurationMock(),
                               recursive_chown=True,
                               security_enabled=False,
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type='directory',
                               action=['create_on_execute'],
                               recursive_chmod=True
                               )
     self.assertResourceCalled('HdfsResource', '/user/zeppelin/test',
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               default_fs=u'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hdfs_site={u'a': u'b'},
@@ -247,7 +247,7 @@ class TestZeppelin070(RMFTestCase):
                               user='hdfs',
                               owner='zeppelin',
                               recursive_chown=True,
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type='directory',
                               action=['create_on_execute'],
                               recursive_chmod=True,
@@ -256,7 +256,7 @@ class TestZeppelin070(RMFTestCase):
                               security_enabled=False,
                               )
     self.assertResourceCalled('HdfsResource', '/apps/zeppelin',
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               default_fs=u'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hdfs_site={u'a': u'b'},
@@ -264,7 +264,7 @@ class TestZeppelin070(RMFTestCase):
                               user='hdfs',
                               owner='zeppelin',
                               recursive_chown=True,
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type='directory',
                               action=['create_on_execute'],
                               recursive_chmod=True,
@@ -273,7 +273,7 @@ class TestZeppelin070(RMFTestCase):
                               security_enabled=False,
                               )
     self.assertResourceCalled('HdfsResource', '/apps/zeppelin/tmp',
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               source='/tmp',
                               default_fs=u'hdfs://c6401.ambari.apache.org:8020',
                               replace_existing_files=True,
@@ -283,7 +283,7 @@ class TestZeppelin070(RMFTestCase):
                               user='hdfs',
                               owner='zeppelin',
                               group='zeppelin',
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               type='file',
                               action=['create_on_execute'],
                               mode=0444,
@@ -292,14 +292,14 @@ class TestZeppelin070(RMFTestCase):
                               security_enabled=False,
                               )
     self.assertResourceCalled('HdfsResource', None,
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
+                              hadoop_bin_dir='/usr/hdp/2.5.0.0-1235/hadoop/bin',
                               default_fs=u'hdfs://c6401.ambari.apache.org:8020',
                               hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hdfs_site={u'a': u'b'},
                               kinit_path_local='/usr/bin/kinit',
                               user='hdfs',
                               action=['execute'],
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
+                              hadoop_conf_dir='/usr/hdp/2.5.0.0-1235/hadoop/conf',
                               keytab=UnknownConfigurationMock(),
                               principal_name=UnknownConfigurationMock(),
                               security_enabled=False,
@@ -307,7 +307,7 @@ class TestZeppelin070(RMFTestCase):
 
     self.assertResourceCalled('HdfsResource', '/etc/zeppelin/conf/interpreter.json',
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         source = '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -318,7 +318,7 @@ class TestZeppelin070(RMFTestCase):
         user = 'hdfs',
         owner = 'zeppelin',
         group = 'zeppelin',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
         type = 'file',
         action = ['download_on_execute'],
     )
@@ -331,7 +331,7 @@ class TestZeppelin070(RMFTestCase):
 
     self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         source = '/etc/zeppelin/conf/interpreter.json',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -343,14 +343,14 @@ class TestZeppelin070(RMFTestCase):
         user = 'hdfs',
         owner = 'zeppelin',
         group = 'zeppelin',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
         type = 'file',
         action = ['create_on_execute'],
     )
 
     self.assertResourceCalled('HdfsResource', '/etc/zeppelin/conf/interpreter.json',
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         source = '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -361,7 +361,7 @@ class TestZeppelin070(RMFTestCase):
         user = 'hdfs',
         owner = 'zeppelin',
         group = 'zeppelin',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
         type = 'file',
         action = ['download_on_execute'],
     )
@@ -374,7 +374,7 @@ class TestZeppelin070(RMFTestCase):
 
     self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         source = '/etc/zeppelin/conf/interpreter.json',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -386,14 +386,14 @@ class TestZeppelin070(RMFTestCase):
         user = 'hdfs',
         owner = 'zeppelin',
         group = 'zeppelin',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
         type = 'file',
         action = ['create_on_execute'],
     )
 
     self.assertResourceCalled('HdfsResource', '/etc/zeppelin/conf/interpreter.json',
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         source = '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -404,7 +404,7 @@ class TestZeppelin070(RMFTestCase):
         user = 'hdfs',
         owner = 'zeppelin',
         group = 'zeppelin',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
         type = 'file',
         action = ['download_on_execute'],
     )
@@ -417,7 +417,7 @@ class TestZeppelin070(RMFTestCase):
 
     self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
         security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        hadoop_bin_dir = '/usr/hdp/2.5.0.0-1235/hadoop/bin',
         keytab = UnknownConfigurationMock(),
         source = '/etc/zeppelin/conf/interpreter.json',
         default_fs = 'hdfs://c6401.ambari.apache.org:8020',
@@ -429,7 +429,7 @@ class TestZeppelin070(RMFTestCase):
         user = 'hdfs',
         owner = 'zeppelin',
         group = 'zeppelin',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/usr/hdp/2.5.0.0-1235/hadoop/conf',
         type = 'file',
         action = ['create_on_execute'],
     )
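
All of the expectation changes in this commit follow one rule: when the command carries an explicit stack version, HDFS operations must resolve the hadoop bin/conf directories from /usr/hdp/&lt;version&gt;/hadoop rather than the /usr/hdp/current/hadoop-client symlink, which can point at the wrong place after a MAINT/PATCH upgrade. A minimal sketch of that rule (hadoop_dirs is a hypothetical helper, not Ambari's API):

import os

STACK_ROOT = "/usr/hdp"  # HDP-style layout, as in the expected paths above

def hadoop_dirs(stack_version=None):
    # An explicit version pins the exact stack directory; without one,
    # fall back to the 'current' symlink maintained by hdp-select.
    if stack_version:
        base = os.path.join(STACK_ROOT, stack_version, "hadoop")
    else:
        base = os.path.join(STACK_ROOT, "current", "hadoop-client")
    return os.path.join(base, "bin"), os.path.join(base, "conf")

# hadoop_dirs("2.5.0.0-1235") -> ('/usr/hdp/2.5.0.0-1235/hadoop/bin',
#                                 '/usr/hdp/2.5.0.0-1235/hadoop/conf')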


[42/50] [abbrv] ambari git commit: AMBARI-22103. HiveView 1.5 is not showing the Visualization page when accessed over Knox (Venkata Sairam)

Posted by rl...@apache.org.
AMBARI-22103. HiveView 1.5 is not showing the Visualization page when accessed over Knox (Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7e6910f7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7e6910f7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7e6910f7

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 7e6910f74b58795fc10de908eaf1103541342ac6
Parents: d4378aa
Author: Venkata Sairam <ve...@gmail.com>
Authored: Sat Sep 30 17:30:20 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Sat Sep 30 17:30:20 2017 +0530

----------------------------------------------------------------------
 .../ui/hive-web/app/adapters/application.js       | 18 ++++++++++++++++++
 .../hive-web/app/controllers/visualization-ui.js  | 10 ++++++----
 .../resources/ui/hive-web/app/utils/constants.js  |  1 +
 3 files changed, 25 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7e6910f7/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js
index 2c68b89..cd93407 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/adapters/application.js
@@ -50,5 +50,23 @@ export default DS.RESTAdapter.extend({
     var prefix = constants.adapter.apiPrefix + version + constants.adapter.instancePrefix + instanceName;
     var url = this._super.apply(this, arguments);
     return prefix + url;
+  },
+
+  buildAssetURL: function () {
+    var version = constants.adapter.version,
+        instanceName = constants.adapter.instance;
+
+    var params = window.location.pathname.split('/').filter(function (param) {
+      return !!param;
+    });
+
+    if (params[params.length - 3] === 'HIVE') {
+      version = params[params.length - 2];
+      instanceName = params[params.length - 1];
+    }
+    var assetPrefix = constants.adapter.apiPrefix.replace("/" + "api" + "/" + "v1", "").replace("versions/","");
+
+    var url = assetPrefix + version + "/" + instanceName;
+    return url;
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/7e6910f7/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js
index c908afd..3f5b878 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/controllers/visualization-ui.js
@@ -32,8 +32,8 @@ export default Ember.Controller.extend({
 
   polestarUrl: '',
   voyagerUrl: '',
-  polestarPath: 'polestar/#/',
-  voyagerPath: 'voyager/#/',
+  polestarPath: 'polestar/index.html#/',
+  voyagerPath: 'voyager/index.html#/',
 
   showDataExplorer: true,
   showAdvVisulization: false,
@@ -79,8 +79,10 @@ export default Ember.Controller.extend({
           }
           this.set("error", null);
           var id = model.get('id');
-          this.set("polestarUrl", this.get('polestarPath') + "?url=" + url);
-          this.set("voyagerUrl", this.get('voyagerPath') + "?url=" + url);
+          var pstarUrl = this.container.lookup('adapter:application').buildAssetURL("") + "/" + this.get('polestarPath') + "?url=" + url;
+          this.set("polestarUrl", pstarUrl);
+          var vUrl = this.container.lookup('adapter:application').buildAssetURL("") + "/" + this.get('voyagerPath') + "?url=" + url;
+          this.set("voyagerUrl", vUrl);
           Ember.run.scheduleOnce('afterRender', this, function(){
             self.alterIframe();
           });

http://git-wip-us.apache.org/repos/asf/ambari/blob/7e6910f7/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js
index 348454b..fafffc5 100644
--- a/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js
+++ b/contrib/views/hive-next/src/main/resources/ui/hive-web/app/utils/constants.js
@@ -200,6 +200,7 @@ export default Ember.Object.create({
     version: '2.0.0',
     instance: 'Hive',
     apiPrefix: '/api/v1/views/HIVE/versions/',
+    assetPrefix: '/views/HIVE/',
     instancePrefix: '/instances/',
     resourcePrefix: 'resources/'
   },
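
The new buildAssetURL recovers the view version and instance name from the current window pathname, so the static polestar/voyager assets resolve under the prefix the browser is actually using (e.g. a Knox gateway path) instead of a hard-coded one. A Python rendering of the same parsing, for illustration only (defaults come from constants.adapter above):

def build_asset_url(pathname,
                    api_prefix="/api/v1/views/HIVE/versions/",
                    version="2.0.0", instance="Hive"):
    # Take .../HIVE/<version>/<instance> from the path when present,
    # otherwise keep the defaults, then strip the REST prefix parts.
    params = [p for p in pathname.split("/") if p]
    if len(params) >= 3 and params[-3] == "HIVE":
        version, instance = params[-2], params[-1]
    asset_prefix = api_prefix.replace("/api/v1", "").replace("versions/", "")
    return asset_prefix + version + "/" + instance

# build_asset_url("/gateway/default/views/HIVE/2.0.0/Hive")
#   -> '/views/HIVE/2.0.0/Hive'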


[44/50] [abbrv] ambari git commit: AMBARI-22092. Blueprint cluster creation constantly throwing exceptions

Posted by rl...@apache.org.
AMBARI-22092. Blueprint cluster creation constantly throwing exceptions


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ff9ae55c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ff9ae55c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ff9ae55c

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: ff9ae55cd4b1728e21cf0e30e6a8040ce4e3cdab
Parents: a66e2de
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Fri Sep 29 13:32:45 2017 +0200
Committer: Attila Doroszlai <ad...@hortonworks.com>
Committed: Sat Sep 30 19:22:42 2017 +0200

----------------------------------------------------------------------
 .../server/topology/AsyncCallableService.java   | 110 ++++++++--------
 .../ambari/server/topology/TopologyManager.java |  26 +---
 .../topology/tasks/ConfigureClusterTask.java    | 124 ++++++++++++-------
 .../topology/AsyncCallableServiceTest.java      |  89 ++++---------
 .../ClusterDeployWithStartOnlyTest.java         |   6 +
 ...InstallWithoutStartOnComponentLevelTest.java |   6 +
 .../ClusterInstallWithoutStartTest.java         |   6 +
 .../topology/ConfigureClusterTaskTest.java      |  64 +++-------
 .../server/topology/TopologyManagerTest.java    |  14 ++-
 9 files changed, 195 insertions(+), 250 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9ae55c/ambari-server/src/main/java/org/apache/ambari/server/topology/AsyncCallableService.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/AsyncCallableService.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/AsyncCallableService.java
index 9a68ea7..db57378 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/AsyncCallableService.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/AsyncCallableService.java
@@ -18,17 +18,19 @@
 
 package org.apache.ambari.server.topology;
 
-import java.util.Calendar;
-import java.util.HashSet;
-import java.util.Set;
 import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
+
 /**
  * Callable service implementation for executing tasks asynchronously.
  * The service repeatedly tries to execute the provided task till it successfully completes, or the provided timeout
@@ -45,89 +47,75 @@ public class AsyncCallableService<T> implements Callable<T> {
 
   // the task to be executed
   private final Callable<T> task;
+  private final String taskName;
 
   // the total time the allowed for the task to be executed (retries will be happen within this timeframe in
   // milliseconds)
   private final long timeout;
 
   // the delay between two consecutive execution trials in milliseconds
-  private final long delay;
+  private final long retryDelay;
 
-  private T serviceResult;
+  public AsyncCallableService(Callable<T> task, long timeout, long retryDelay, String taskName) {
+    this(task, timeout, retryDelay, taskName, Executors.newScheduledThreadPool(1));
+  }
 
-  private final Set<Exception> errors = new HashSet<>();
+  public AsyncCallableService(Callable<T> task, long timeout, long retryDelay, String taskName, ScheduledExecutorService executorService) {
+    Preconditions.checkArgument(retryDelay > 0, "retryDelay should be positive");
 
-  public AsyncCallableService(Callable<T> task, long timeout, long delay,
-                              ScheduledExecutorService executorService) {
     this.task = task;
     this.executorService = executorService;
     this.timeout = timeout;
-    this.delay = delay;
+    this.retryDelay = retryDelay;
+    this.taskName = taskName;
   }
 
   @Override
-  public T call() {
-
-    long startTimeInMillis = Calendar.getInstance().getTimeInMillis();
-    LOG.info("Task execution started at: {}", startTimeInMillis);
-
-    // task execution started on a new thread
+  public T call() throws Exception {
+    long startTime = System.currentTimeMillis();
+    long timeLeft = timeout;
     Future<T> future = executorService.submit(task);
-
-    while (!taskCompleted(future)) {
-      if (!timeoutExceeded(startTimeInMillis)) {
-        LOG.debug("Retrying task execution in [ {} ] milliseconds.", delay);
-        future = executorService.schedule(task, delay, TimeUnit.MILLISECONDS);
-      } else {
-        LOG.debug("Timout exceeded, cancelling task ... ");
-        // making sure the task gets cancelled!
-        if (!future.isDone()) {
-          boolean cancelled = future.cancel(true);
-          LOG.debug("Task cancelled: {}", cancelled);
-        } else {
-          LOG.debug("Task already done.");
+    LOG.info("Task {} execution started at {}", taskName, startTime);
+
+    while (true) {
+      try {
+        LOG.debug("Task {} waiting for result at most {} ms", taskName, timeLeft);
+        T taskResult = future.get(timeLeft, TimeUnit.MILLISECONDS);
+        LOG.info("Task {} successfully completed with result: {}", taskName, taskResult);
+        return taskResult;
+      } catch (TimeoutException e) {
+        LOG.debug("Task {} timeout", taskName);
+        timeLeft = 0;
+      } catch (ExecutionException e) {
+        Throwable cause = e.getCause();
+        if (!(cause instanceof RetryTaskSilently)) {
+          LOG.info(String.format("Task %s exception during execution", taskName), cause);
         }
-        LOG.info("Timeout exceeded, task execution won't be retried!");
-        // exit the "retry" loop!
-        break;
+        timeLeft = timeout - (System.currentTimeMillis() - startTime);
       }
-    }
 
-    LOG.info("Exiting Async task execution with the result: [ {} ]", serviceResult);
-    return serviceResult;
-  }
+      if (timeLeft < retryDelay) {
+        attemptToCancel(future);
+        LOG.warn("Task {} timeout exceeded, no more retries", taskName);
+        return null;
+      }
 
-  private boolean taskCompleted(Future<T> future) {
-    boolean completed = false;
-    try {
-      LOG.debug("Retrieving task execution result ...");
-      // should receive task execution result within the configured timeout interval
-      // exceptions thrown from the task are propagated here
-      T taskResult = future.get(timeout, TimeUnit.MILLISECONDS);
-
-      // task failures are expected to be reported as exceptions
-      LOG.debug("Task successfully executed: {}", taskResult);
-      setServiceResult(taskResult);
-      errors.clear();
-      completed = true;
-    } catch (Exception e) {
-      // Future.isDone always true here!
-      LOG.info("Exception during task execution: ", e);
-      errors.add(e);
+      LOG.debug("Task {} retrying execution in {} milliseconds", taskName, retryDelay);
+      future = executorService.schedule(task, retryDelay, TimeUnit.MILLISECONDS);
     }
-    return completed;
   }
 
-  private boolean timeoutExceeded(long startTimeInMillis) {
-    return timeout < Calendar.getInstance().getTimeInMillis() - startTimeInMillis;
-  }
-
-  private void setServiceResult(T serviceResult) {
-    this.serviceResult = serviceResult;
+  private void attemptToCancel(Future<?> future) {
+    LOG.debug("Task {} timeout exceeded, cancelling", taskName);
+    if (!future.isDone() && future.cancel(true)) {
+      LOG.debug("Task {} cancelled", taskName);
+    } else {
+      LOG.debug("Task {} already done", taskName);
+    }
   }
 
-  public Set<Exception> getErrors() {
-    return errors;
+  public static class RetryTaskSilently extends RuntimeException {
+    // marker, throw if the task needs to be retried
   }
 
 }

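The rewritten AsyncCallableService above drives the whole retry loop from a
single time budget: Future.get() is handed the remaining time, a
TimeoutException zeroes the budget, an ExecutionException recomputes it, and
the task is rescheduled only while at least one retryDelay still fits. A
minimal usage sketch, assuming only the five-argument constructor and the
RetryTaskSilently marker shown in this hunk (the demo class and task body are
illustrative):

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.atomic.AtomicInteger;

    public class AsyncCallableServiceDemo {
      public static void main(String[] args) throws Exception {
        ScheduledExecutorService pool = Executors.newScheduledThreadPool(1);
        AtomicInteger attempts = new AtomicInteger();

        // Fails twice with the silent-retry marker, then succeeds.
        AsyncCallableService<Boolean> service = new AsyncCallableService<>(
            () -> {
              if (attempts.incrementAndGet() < 3) {
                throw new AsyncCallableService.RetryTaskSilently();
              }
              return Boolean.TRUE;
            },
            5000,        // total time budget in ms
            100,         // retry delay in ms
            "demo-task",
            pool);

        // Prints TRUE after roughly two retry delays; would print null if the
        // budget had been exhausted first.
        System.out.println("result = " + service.call());
        pool.shutdownNow();
      }
    }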
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9ae55c/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
index 9769fae..d07dec0 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java
@@ -100,9 +100,6 @@ public class TopologyManager {
   public static final String TOPOLOGY_RESOLVED_TAG = "TOPOLOGY_RESOLVED";
   public static final String KDC_ADMIN_CREDENTIAL = "kdc.admin.credential";
 
-  private static final String CLUSTER_ENV_CONFIG_TYPE_NAME = "cluster-env";
-  private static final String CLUSTER_CONFIG_TASK_MAX_TIME_IN_MILLIS_PROPERTY_NAME = "cluster_configure_task_timeout";
-
   private PersistedState persistedState;
 
   /**
@@ -1119,27 +1116,8 @@ public class TopologyManager {
    * @param configurationRequest  configuration request to be executed
    */
   private void addClusterConfigRequest(ClusterTopology topology, ClusterConfigurationRequest configurationRequest) {
-
-    String timeoutStr = topology.getConfiguration().getPropertyValue(CLUSTER_ENV_CONFIG_TYPE_NAME,
-        CLUSTER_CONFIG_TASK_MAX_TIME_IN_MILLIS_PROPERTY_NAME);
-
-    long timeout = 1000 * 60 * 30; // 30 minutes
-    long delay = 1000; //ms
-
-    if (timeoutStr != null) {
-      timeout = Long.parseLong(timeoutStr);
-      LOG.debug("ConfigureClusterTask timeout set to: {}", timeout);
-    } else {
-      LOG.debug("No timeout constraints found in configuration. Wired defaults will be applied.");
-    }
-
-    ConfigureClusterTask configureClusterTask = configureClusterTaskFactory.createConfigureClusterTask(topology,
-      configurationRequest, ambariEventPublisher);
-
-    AsyncCallableService<Boolean> asyncCallableService = new AsyncCallableService<>(configureClusterTask, timeout, delay,
-        Executors.newScheduledThreadPool(1));
-
-    executor.submit(asyncCallableService);
+    ConfigureClusterTask task = configureClusterTaskFactory.createConfigureClusterTask(topology, configurationRequest, ambariEventPublisher);
+    executor.submit(new AsyncCallableService<>(task, task.getTimeout(), task.getRepeatDelay(), "ConfigureClusterTask"));
   }
 
   /**

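With this change addClusterConfigRequest no longer reads cluster-env itself;
the factory-created task reports its own timeout and repeat delay. Note that
the call above uses a four-argument AsyncCallableService constructor with no
executor, as do the reworked tests below. That overload is not shown in these
hunks; a plausible shape for it, assuming it provisions its own single-thread
scheduler, would be:

    // Hypothetical convenience overload, not part of this diff: delegates to
    // the five-argument constructor with a freshly created scheduler.
    public AsyncCallableService(Callable<T> task, long timeout, long retryDelay, String taskName) {
      this(task, timeout, retryDelay, taskName, Executors.newScheduledThreadPool(1));
    }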
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9ae55c/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
index 60eaa59..0f13ec2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/tasks/ConfigureClusterTask.java
@@ -20,11 +20,16 @@ package org.apache.ambari.server.topology.tasks;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
 
+import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.events.ClusterConfigFinishedEvent;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.security.authorization.internal.RunWithInternalSecurityContext;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.topology.AsyncCallableService;
 import org.apache.ambari.server.topology.ClusterConfigurationRequest;
 import org.apache.ambari.server.topology.ClusterTopology;
 import org.apache.ambari.server.topology.HostGroupInfo;
@@ -32,16 +37,23 @@ import org.apache.ambari.server.topology.TopologyManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 import com.google.inject.assistedinject.Assisted;
 import com.google.inject.assistedinject.AssistedInject;
 
 public class ConfigureClusterTask implements Callable<Boolean> {
 
+  private static final long DEFAULT_TIMEOUT = TimeUnit.MINUTES.toMillis(30);
+  private static final long REPEAT_DELAY = TimeUnit.SECONDS.toMillis(1);
+  private static final String TIMEOUT_PROPERTY_NAME = "cluster_configure_task_timeout";
   private static final Logger LOG = LoggerFactory.getLogger(ConfigureClusterTask.class);
 
-  private ClusterConfigurationRequest configRequest;
-  private ClusterTopology topology;
-  private AmbariEventPublisher ambariEventPublisher;
+  private final ClusterConfigurationRequest configRequest;
+  private final ClusterTopology topology;
+  private final AmbariEventPublisher ambariEventPublisher;
+  private final Map<String, Integer> previousHostCounts = Maps.newHashMap();
+  private final Set<String> missingHostGroups = Sets.newHashSet();
 
   @AssistedInject
   public ConfigureClusterTask(@Assisted ClusterTopology topology, @Assisted ClusterConfigurationRequest configRequest,
@@ -54,55 +66,56 @@ public class ConfigureClusterTask implements Callable<Boolean> {
   @Override
   @RunWithInternalSecurityContext(token = TopologyManager.INTERNAL_AUTH_TOKEN)
   public Boolean call() throws Exception {
-    LOG.info("TopologyManager.ConfigureClusterTask: Entering");
+    LOG.debug("Entering");
 
     Collection<String> requiredHostGroups = getTopologyRequiredHostGroups();
 
-    if (!areRequiredHostGroupsResolved(requiredHostGroups)) {
-      LOG.debug("TopologyManager.ConfigureClusterTask - prerequisites for config request processing not yet " +
-        "satisfied");
-      throw new IllegalArgumentException("TopologyManager.ConfigureClusterTask - prerequisites for config " +
-        "request processing not yet  satisfied");
+    if (!areHostGroupsResolved(requiredHostGroups)) {
+      LOG.info("Some host groups require more hosts, cluster configuration cannot begin");
+      throw new AsyncCallableService.RetryTaskSilently();
     }
 
-    try {
-      LOG.info("TopologyManager.ConfigureClusterTask: All Required host groups are completed, Cluster " +
-        "Configuration can now begin");
-      configRequest.process();
-    } catch (Exception e) {
-      LOG.error("TopologyManager.ConfigureClusterTask: " +
-        "An exception occurred while attempting to process cluster configs and set on cluster: ", e);
-
-      // this will signal an unsuccessful run, retry will be triggered if required
-      throw new Exception(e);
-    }
+    LOG.info("All required host groups are complete, cluster configuration can now begin");
+    configRequest.process();
+    LOG.info("Cluster configuration finished successfully");
 
-    LOG.info("Cluster configuration finished successfully!");
-    // Notify listeners that cluster configuration finished
-    long clusterId = topology.getClusterId();
-    ambariEventPublisher.publish(new ClusterConfigFinishedEvent(clusterId,
-            topology.getAmbariContext().getClusterName(clusterId)));
+    notifyListeners();
 
-    LOG.info("TopologyManager.ConfigureClusterTask: Exiting");
+    LOG.debug("Exiting");
     return true;
   }
 
+  public long getTimeout() {
+    long timeout = DEFAULT_TIMEOUT;
+
+    String timeoutStr = topology.getConfiguration().getPropertyValue(ConfigHelper.CLUSTER_ENV, TIMEOUT_PROPERTY_NAME);
+    if (timeoutStr != null) {
+      try {
+        timeout = Long.parseLong(timeoutStr);
+        LOG.info("Using custom timeout: {} ms", timeout);
+      } catch (NumberFormatException e) {
+        // use default
+      }
+    }
+
+    return timeout;
+  }
+
+  public long getRepeatDelay() {
+    return REPEAT_DELAY;
+  }
+
   /**
    * Return the set of host group names which are required for configuration topology resolution.
-   *
-   * @return set of required host group names
    */
   private Collection<String> getTopologyRequiredHostGroups() {
-    Collection<String> requiredHostGroups;
     try {
-      requiredHostGroups = configRequest.getRequiredHostGroups();
+      return configRequest.getRequiredHostGroups();
     } catch (RuntimeException e) {
       // just log error and allow config topology update
-      LOG.error("TopologyManager.ConfigureClusterTask: An exception occurred while attempting to determine required" +
-        " host groups for config update ", e);
-      requiredHostGroups = Collections.emptyList();
+      LOG.error("Could not determine required host groups", e);
+      return Collections.emptyList();
     }
-    return requiredHostGroups;
   }
 
   /**
@@ -111,23 +124,44 @@ public class ConfigureClusterTask implements Callable<Boolean> {
    * @param requiredHostGroups set of required host groups
    * @return true if all required host groups are resolved
    */
-  private boolean areRequiredHostGroupsResolved(Collection<String> requiredHostGroups) {
-    boolean configTopologyResolved = true;
+  private boolean areHostGroupsResolved(Collection<String> requiredHostGroups) {
+    boolean allHostGroupsResolved = true;
     Map<String, HostGroupInfo> hostGroupInfo = topology.getHostGroupInfo();
     for (String hostGroup : requiredHostGroups) {
       HostGroupInfo groupInfo = hostGroupInfo.get(hostGroup);
-      if (groupInfo == null || groupInfo.getHostNames().size() < groupInfo.getRequestedHostCount()) {
-        configTopologyResolved = false;
-        if (groupInfo != null) {
-          LOG.info("TopologyManager.ConfigureClusterTask areHostGroupsResolved: host group name = {} requires {} hosts to be mapped, but only {} are available.",
-            groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
+      if (groupInfo == null) {
+        allHostGroupsResolved = false;
+        if (missingHostGroups.add(hostGroup)) {
+          LOG.warn("Host group '{}' is missing from cluster creation request", hostGroup);
         }
-        break;
       } else {
-        LOG.info("TopologyManager.ConfigureClusterTask areHostGroupsResolved: host group name = {} has been fully resolved, as all {} required hosts are mapped to {} physical hosts.",
-          groupInfo.getHostGroupName(), groupInfo.getRequestedHostCount(), groupInfo.getHostNames().size());
+        int actualHostCount = groupInfo.getHostNames().size();
+        int requestedHostCount = groupInfo.getRequestedHostCount();
+        boolean hostGroupReady = actualHostCount >= requestedHostCount;
+        allHostGroupsResolved &= hostGroupReady;
+
+        Integer previousHostCount = previousHostCounts.put(hostGroup, actualHostCount);
+        if (previousHostCount == null || previousHostCount != actualHostCount) {
+          if (hostGroupReady) {
+            LOG.info("Host group '{}' resolved, requires {} hosts and {} are available",
+              groupInfo.getHostGroupName(), requestedHostCount, actualHostCount
+            );
+          } else {
+            LOG.info("Host group '{}' pending, requires {} hosts, but only {} are available",
+              groupInfo.getHostGroupName(), requestedHostCount, actualHostCount
+            );
+          }
+        }
       }
     }
-    return configTopologyResolved;
+
+    return allHostGroupsResolved;
   }
+
+  private void notifyListeners() throws AmbariException {
+    long clusterId = topology.getClusterId();
+    String clusterName = topology.getAmbariContext().getClusterName(clusterId);
+    ambariEventPublisher.publish(new ClusterConfigFinishedEvent(clusterId, clusterName));
+  }
+
 }

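Two behavioral details in this rewrite are worth calling out. First,
ConfigureClusterTask now signals "prerequisites not met" by throwing
AsyncCallableService.RetryTaskSilently instead of an IllegalArgumentException,
so the service retries without logging an error, and areHostGroupsResolved
keeps previousHostCounts so progress is logged only when a group's host count
actually changes rather than once per retry. Second, getTimeout() falls back
to the 30-minute default when cluster_configure_task_timeout is missing or
unparseable. A sketch of a task following the same retry contract (the
predicate and action here are hypothetical):

    // Throw the marker while a precondition is unmet; AsyncCallableService
    // catches it, stays quiet, and reschedules after the retry delay.
    Callable<Boolean> waitUntilReady = () -> {
      if (!externalSystemIsReady()) {              // hypothetical predicate
        throw new AsyncCallableService.RetryTaskSilently();
      }
      doTheActualWork();                           // hypothetical action
      return Boolean.TRUE;
    };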
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9ae55c/ambari-server/src/test/java/org/apache/ambari/server/topology/AsyncCallableServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/AsyncCallableServiceTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/AsyncCallableServiceTest.java
index 4d96294..edc0954 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/AsyncCallableServiceTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/AsyncCallableServiceTest.java
@@ -19,10 +19,8 @@
 package org.apache.ambari.server.topology;
 
 import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.verify;
 
 import java.util.concurrent.Callable;
-import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
@@ -33,14 +31,13 @@ import org.easymock.EasyMockSupport;
 import org.easymock.Mock;
 import org.easymock.MockType;
 import org.junit.Assert;
-import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 public class AsyncCallableServiceTest extends EasyMockSupport {
-  public static final Logger LOGGER = LoggerFactory.getLogger(AsyncCallableService.class);
+
+  private static final long TIMEOUT = 1000; // default timeout
+  private static final long RETRY_DELAY = 50; // default delay between tries
 
   @Rule
   public EasyMockRule mocks = new EasyMockRule(this);
@@ -54,129 +51,91 @@ public class AsyncCallableServiceTest extends EasyMockSupport {
   @Mock
   private ScheduledFuture<Boolean> futureMock;
 
-  private long timeout;
-
-  private long delay;
-
   private AsyncCallableService<Boolean> asyncCallableService;
 
-  @Before
-  public void setup() {
-    // default timeout, overwrite it if necessary
-    timeout = 1000;
-
-    // default delay between tries
-    delay = 500;
-  }
-
-
   @Test
   public void testCallableServiceShouldCancelTaskWhenTimeoutExceeded() throws Exception {
     // GIVEN
-
-    //the timeout period should be less zero for guaranteed timeout!
-    timeout = -1l;
-
-    // the task to be executed never completes successfully
+    long timeout = -1; // guaranteed timeout
     expect(futureMock.get(timeout, TimeUnit.MILLISECONDS)).andThrow(new TimeoutException("Testing the timeout exceeded case"));
     expect(futureMock.isDone()).andReturn(Boolean.FALSE);
-
-    // this is only called when a timeout occurs
     expect(futureMock.cancel(true)).andReturn(Boolean.TRUE);
-
     expect(executorServiceMock.submit(taskMock)).andReturn(futureMock);
-
     replayAll();
 
-    asyncCallableService = new AsyncCallableService<>(taskMock, timeout, delay, executorServiceMock);
+    asyncCallableService = new AsyncCallableService<>(taskMock, timeout, RETRY_DELAY, "test", executorServiceMock);
 
     // WHEN
     Boolean serviceResult = asyncCallableService.call();
 
     // THEN
-    verify();
-    Assert.assertNull("Service result must be null", serviceResult);
-    Assert.assertFalse("The service should have errors!", asyncCallableService.getErrors().isEmpty());
+    verifyAll();
+    Assert.assertNull("No result expected in case of timeout", serviceResult);
   }
 
   @Test
   public void testCallableServiceShouldCancelTaskWhenTaskHangsAndTimeoutExceeded() throws Exception {
     // GIVEN
     //the task call hangs, it doesn't return within a reasonable period of time
-    Callable<Boolean> hangingTask = new Callable<Boolean>() {
-      @Override
-      public Boolean call() throws Exception {
-        Thread.sleep(10000000);
-        return false;
-      }
+    Callable<Boolean> hangingTask = () -> {
+      Thread.sleep(10000000);
+      return false;
     };
 
-    asyncCallableService = new AsyncCallableService<>(hangingTask, timeout, delay, Executors.newScheduledThreadPool(2));
+    asyncCallableService = new AsyncCallableService<>(hangingTask, TIMEOUT, RETRY_DELAY, "test");
 
     // WHEN
     Boolean serviceResult = asyncCallableService.call();
 
     // THEN
-    Assert.assertNull("Service result must be null", serviceResult);
-    Assert.assertFalse("The service should have errors!", asyncCallableService.getErrors().isEmpty());
+    Assert.assertNull("No result expected from hanging task", serviceResult);
   }
 
   @Test
   public void testCallableServiceShouldExitWhenTaskCompleted() throws Exception {
     // GIVEN
-    // the task to be executed never completes successfully
-    expect(taskMock.call()).andReturn(Boolean.TRUE).times(1);
-
+    expect(taskMock.call()).andReturn(Boolean.TRUE);
     replayAll();
-    asyncCallableService = new AsyncCallableService<>(taskMock, timeout, delay, Executors.newScheduledThreadPool(2));
+    asyncCallableService = new AsyncCallableService<>(taskMock, TIMEOUT, RETRY_DELAY, "test");
 
     // WHEN
     Boolean serviceResult = asyncCallableService.call();
 
     // THEN
-    verify();
-    Assert.assertNotNull("Service result must not be null", serviceResult);
-    Assert.assertTrue(serviceResult);
+    verifyAll();
+    Assert.assertEquals(Boolean.TRUE, serviceResult);
   }
 
   @Test
   public void testCallableServiceShouldRetryTaskExecutionTillTimeoutExceededWhenTaskThrowsException() throws Exception {
     // GIVEN
-
-    // the task to be throws exception
     expect(taskMock.call()).andThrow(new IllegalStateException("****************** TESTING ****************")).times(2, 3);
     replayAll();
-    asyncCallableService = new AsyncCallableService<>(taskMock, timeout, delay, Executors.newScheduledThreadPool(2));
+    asyncCallableService = new AsyncCallableService<>(taskMock, TIMEOUT, RETRY_DELAY, "test");
 
     // WHEN
     Boolean serviceResult = asyncCallableService.call();
 
     // THEN
-    verify();
-    // THEN
-    Assert.assertNull("Service result must be null", serviceResult);
-
+    verifyAll();
+    Assert.assertNull("No result expected from throwing task", serviceResult);
   }
 
 
   @Test
   public void testShouldAsyncCallableServiceRetryExecutionWhenTaskThrowsException() throws Exception {
     // GIVEN
-    //the task call hangs, it doesn't return within a reasonable period of time
-    Callable<Boolean> hangingTask = new Callable<Boolean>() {
-      @Override
-      public Boolean call() throws Exception {
-        throw new IllegalStateException("****************** TESTING ****************");
-      }
+    // the task throws an exception
+    Callable<Boolean> throwingTask = () -> {
+      throw new IllegalStateException("****************** TESTING ****************");
     };
 
-    asyncCallableService = new AsyncCallableService<>(hangingTask, timeout, delay, Executors.newScheduledThreadPool(2));
+    asyncCallableService = new AsyncCallableService<>(throwingTask, TIMEOUT, RETRY_DELAY, "test");
 
     // WHEN
     Boolean serviceResult = asyncCallableService.call();
 
     // THEN
-    verify();
-    Assert.assertNull("Service result must be null", serviceResult);
+    Assert.assertNull("No result expected from throwing task", serviceResult);
   }
 }

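The test class now extends EasyMockSupport, which is what makes replayAll()
and verifyAll() available. This also fixes a latent weakness in the old tests:
the statically imported verify() was called with no arguments, which verifies
nothing, whereas verifyAll() checks every mock registered through the rule. A
minimal illustration of the pattern (class and test names are invented):

    import static org.easymock.EasyMock.expect;
    import static org.junit.Assert.assertEquals;

    import java.util.concurrent.Callable;

    import org.easymock.EasyMockRule;
    import org.easymock.EasyMockSupport;
    import org.easymock.Mock;
    import org.junit.Rule;
    import org.junit.Test;

    public class ReplayAllExampleTest extends EasyMockSupport {

      @Rule
      public EasyMockRule mocks = new EasyMockRule(this);

      @Mock
      private Callable<Boolean> task;

      @Test
      public void taskResultIsReturned() throws Exception {
        expect(task.call()).andReturn(Boolean.TRUE);
        replayAll();                 // replays every mock created via the rule

        assertEquals(Boolean.TRUE, task.call());

        verifyAll();                 // verifies every registered mock
      }
    }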
http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9ae55c/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
index 0daa20f..aecc6cb 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java
@@ -61,6 +61,7 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.SecurityType;
+import org.apache.ambari.server.topology.tasks.ConfigureClusterTask;
 import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory;
 import org.apache.ambari.server.topology.validators.TopologyValidatorService;
 import org.easymock.Capture;
@@ -152,6 +153,8 @@ public class ClusterDeployWithStartOnlyTest extends EasyMockSupport {
   private ComponentInfo clientComponentInfo;
   @Mock(type = MockType.NICE)
   private ConfigureClusterTaskFactory configureClusterTaskFactory;
+  @Mock(type = MockType.NICE)
+  private ConfigureClusterTask configureClusterTask;
 
   @Mock(type = MockType.STRICT)
   private Future mockFuture;
@@ -391,6 +394,9 @@ public class ClusterDeployWithStartOnlyTest extends EasyMockSupport {
     ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION);
     expectLastCall().once();
 
+    expect(configureClusterTaskFactory.createConfigureClusterTask(anyObject(), anyObject(), anyObject())).andReturn(configureClusterTask);
+    expect(configureClusterTask.getTimeout()).andReturn(1000L);
+    expect(configureClusterTask.getRepeatDelay()).andReturn(50L);
     expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(1);
 
     persistedTopologyRequest = new PersistedTopologyRequest(1, request);

http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9ae55c/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
index bbf4fdb..a4b2160 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java
@@ -62,6 +62,7 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.SecurityType;
+import org.apache.ambari.server.topology.tasks.ConfigureClusterTask;
 import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory;
 import org.apache.ambari.server.topology.validators.TopologyValidatorService;
 import org.easymock.Capture;
@@ -143,6 +144,8 @@ public class ClusterInstallWithoutStartOnComponentLevelTest extends EasyMockSupp
   private HostRoleCommand hostRoleCommand;
   @Mock(type = MockType.NICE)
   private ConfigureClusterTaskFactory configureClusterTaskFactory;
+  @Mock(type = MockType.NICE)
+  private ConfigureClusterTask configureClusterTask;
 
 
   @Mock(type = MockType.NICE)
@@ -368,6 +371,9 @@ public class ClusterInstallWithoutStartOnComponentLevelTest extends EasyMockSupp
     ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION);
     expectLastCall().once();
 
+    expect(configureClusterTaskFactory.createConfigureClusterTask(anyObject(), anyObject(), anyObject())).andReturn(configureClusterTask);
+    expect(configureClusterTask.getTimeout()).andReturn(1000L);
+    expect(configureClusterTask.getRepeatDelay()).andReturn(50L);
     expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(1);
 
     persistedTopologyRequest = new PersistedTopologyRequest(1, request);

http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9ae55c/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
index 059a8be..d89c8ca 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java
@@ -62,6 +62,7 @@ import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.ComponentInfo;
 import org.apache.ambari.server.state.SecurityType;
+import org.apache.ambari.server.topology.tasks.ConfigureClusterTask;
 import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory;
 import org.apache.ambari.server.topology.validators.TopologyValidatorService;
 import org.easymock.Capture;
@@ -145,6 +146,8 @@ public class ClusterInstallWithoutStartTest extends EasyMockSupport {
   private HostRoleCommand hostRoleCommand;
   @Mock(type = MockType.NICE)
   private ConfigureClusterTaskFactory configureClusterTaskFactory;
+  @Mock(type = MockType.NICE)
+  private ConfigureClusterTask configureClusterTask;
 
 
   @Mock(type = MockType.NICE)
@@ -363,6 +366,9 @@ public class ClusterInstallWithoutStartTest extends EasyMockSupport {
     ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION);
     expectLastCall().once();
 
+    expect(configureClusterTaskFactory.createConfigureClusterTask(anyObject(), anyObject(), anyObject())).andReturn(configureClusterTask);
+    expect(configureClusterTask.getTimeout()).andReturn(1000L);
+    expect(configureClusterTask.getRepeatDelay()).andReturn(50L);
     expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).times(1);
 
     persistedTopologyRequest = new PersistedTopologyRequest(1, request);

http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9ae55c/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
index b2dac8f..f4afbea 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ConfigureClusterTaskTest.java
@@ -20,38 +20,27 @@ package org.apache.ambari.server.topology;
 
 import static org.easymock.EasyMock.anyObject;
 import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
-import static org.easymock.EasyMock.verify;
 
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.Executors;
 
+import org.apache.ambari.server.events.AmbariEvent;
 import org.apache.ambari.server.events.publishers.AmbariEventPublisher;
 import org.apache.ambari.server.topology.tasks.ConfigureClusterTask;
 import org.easymock.EasyMockRule;
+import org.easymock.EasyMockSupport;
 import org.easymock.Mock;
 import org.easymock.MockType;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import junit.framework.Assert;
 
 /**
  * Unit test for the ConfigureClusterTask class.
  * As business methods of this class don't return values, the assertions are made by verifying method calls on mocks.
  * Thus having strict mocks is essential!
  */
-public class ConfigureClusterTaskTest {
-
-  private static final Logger LOGGER = LoggerFactory.getLogger(ConfigureClusterTaskTest.class);
+public class ConfigureClusterTaskTest extends EasyMockSupport {
 
   @Rule
   public EasyMockRule mocks = new EasyMockRule(this);
@@ -72,70 +61,45 @@ public class ConfigureClusterTaskTest {
 
   @Before
   public void before() {
-    reset(clusterConfigurationRequest, clusterTopology, ambariContext, ambariEventPublisher);
+    resetAll();
     testSubject = new ConfigureClusterTask(clusterTopology, clusterConfigurationRequest, ambariEventPublisher);
   }
 
   @Test
-  public void testShouldConfigureClusterTaskLogicBeExecutedWhenRequiredHostgroupsAreResolved() throws
-      Exception {
+  public void taskShouldBeExecutedIfRequiredHostgroupsAreResolved() throws Exception {
     // GIVEN
-    // is it OK to handle the non existence of hostgroups as a success?!
     expect(clusterConfigurationRequest.getRequiredHostGroups()).andReturn(Collections.emptyList());
     expect(clusterTopology.getHostGroupInfo()).andReturn(Collections.emptyMap());
     expect(clusterTopology.getClusterId()).andReturn(1L).anyTimes();
     expect(clusterTopology.getAmbariContext()).andReturn(ambariContext);
     expect(ambariContext.getClusterName(1L)).andReturn("testCluster");
-
-    // this is only called if the "prerequisites" are satisfied
     clusterConfigurationRequest.process();
-    ambariEventPublisher.publish(anyObject());
-
-    replay(clusterConfigurationRequest, clusterTopology, ambariContext, ambariEventPublisher);
+    ambariEventPublisher.publish(anyObject(AmbariEvent.class));
+    replayAll();
 
     // WHEN
     Boolean result = testSubject.call();
 
     // THEN
-    verify();
+    verifyAll();
     Assert.assertTrue(result);
   }
 
   @Test
   public void testsShouldConfigureClusterTaskExecuteWhenCalledFromAsyncCallableService() throws Exception {
     // GIVEN
-    // is it OK to handle the non existence of hostgroups as a success?!
     expect(clusterConfigurationRequest.getRequiredHostGroups()).andReturn(Collections.emptyList());
     expect(clusterTopology.getHostGroupInfo()).andReturn(Collections.emptyMap());
-
-    // this is only called if the "prerequisites" are satisfied
     clusterConfigurationRequest.process();
+    replayAll();
 
-    replay(clusterConfigurationRequest, clusterTopology);
-
-    AsyncCallableService<Boolean> asyncService = new AsyncCallableService<>(testSubject, 5000, 500, Executors
-        .newScheduledThreadPool(3));
+    AsyncCallableService<Boolean> asyncService = new AsyncCallableService<>(testSubject, 5000, 500, "test");
 
     // WHEN
     asyncService.call();
-    // THEN
-
 
+    // THEN
+    verifyAll();
   }
 
-  private Collection<String> mockRequiredHostGroups() {
-    return Arrays.asList("test-hostgroup-1");
-  }
-
-  private Map<String, HostGroupInfo> mockHostGroupInfo() {
-    Map<String, HostGroupInfo> hostGroupInfoMap = new HashMap<>();
-    HostGroupInfo hostGroupInfo = new HostGroupInfo("test-hostgroup-1");
-    hostGroupInfo.addHost("test-host-1");
-    hostGroupInfo.setRequestedCount(2);
-
-    hostGroupInfoMap.put("test-hostgroup-1", hostGroupInfo);
-    return hostGroupInfoMap;
-  }
-
-
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/ff9ae55c/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
index ac643d7..5f61c85 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java
@@ -66,6 +66,7 @@ import org.apache.ambari.server.security.encryption.CredentialStoreService;
 import org.apache.ambari.server.stack.NoSuchStackException;
 import org.apache.ambari.server.state.SecurityType;
 import org.apache.ambari.server.state.quicklinksprofile.QuickLinksProfile;
+import org.apache.ambari.server.topology.tasks.ConfigureClusterTask;
 import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory;
 import org.apache.ambari.server.topology.validators.TopologyValidatorService;
 import org.easymock.Capture;
@@ -156,7 +157,8 @@ public class TopologyManagerTest {
   private ClusterTopology clusterTopologyMock;
   @Mock(type = MockType.NICE)
   private ConfigureClusterTaskFactory configureClusterTaskFactory;
-
+  @Mock(type = MockType.NICE)
+  private ConfigureClusterTask configureClusterTask;
 
   @Mock(type = MockType.STRICT)
   private Future mockFuture;
@@ -343,9 +345,10 @@ public class TopologyManagerTest {
 
     expect(clusterController.ensureResourceProvider(anyObject(Resource.Type.class))).andReturn(resourceProvider);
 
-    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture);
-
-    expectLastCall().anyTimes();
+    expect(configureClusterTaskFactory.createConfigureClusterTask(anyObject(), anyObject(), anyObject())).andReturn(configureClusterTask);
+    expect(configureClusterTask.getTimeout()).andReturn(1000L);
+    expect(configureClusterTask.getRepeatDelay()).andReturn(50L);
+    expect(executor.submit(anyObject(AsyncCallableService.class))).andReturn(mockFuture).anyTimes();
 
     expect(persistedState.persistTopologyRequest(request)).andReturn(persistedTopologyRequest).anyTimes();
     persistedState.persistLogicalRequest(logicalRequest, 1);
@@ -527,7 +530,8 @@ public class TopologyManagerTest {
     replay(blueprint, stack, request, group1, group2, ambariContext, logicalRequestFactory,
             configurationRequest, configurationRequest2, configurationRequest3, executor,
             persistedState, clusterTopologyMock, securityConfigurationFactory, credentialStoreService,
-            clusterController, resourceProvider, mockFuture, requestStatusResponse, logicalRequest, settingDAO);
+            clusterController, resourceProvider, mockFuture, requestStatusResponse, logicalRequest, settingDAO,
+            configureClusterTaskFactory, configureClusterTask);
   }
 
   @Test(expected = InvalidTopologyException.class)


[13/50] [abbrv] ambari git commit: AMBARI-22071. During Upgrade the Icons Showing Required Services Is Wrong (alexantonenko)

Posted by rl...@apache.org.
AMBARI-22071. During Upgrade the Icons Showing Required Services Is Wrong (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/81354ffd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/81354ffd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/81354ffd

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 81354ffd2b32f22178f98c8588974faa98a136dd
Parents: b027837
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Wed Sep 27 16:28:08 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Wed Sep 27 16:28:08 2017 +0300

----------------------------------------------------------------------
 .../main/admin/stack_upgrade/upgrade_version_column_view.js    | 2 +-
 .../admin/stack_upgrade/upgrade_version_column_view_test.js    | 6 ++++++
 2 files changed, 7 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/81354ffd/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
index b0074a9..67bde4f 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/upgrade_version_column_view.js
@@ -64,7 +64,7 @@ App.UpgradeVersionColumnView = App.UpgradeVersionBoxView.extend({
   }.property(),
 
   getNotUpgradable: function(isAvailable, isUpgradable) {
-    return this.get('content.isMaint') && this.get('content.status') !== 'CURRENT' && isAvailable && !isUpgradable;
+    return this.get('content.isMaint') && !this.get('isUpgrading') && this.get('content.status') !== 'CURRENT' && isAvailable && !isUpgradable;
   },
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/81354ffd/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_column_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_column_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_column_view_test.js
index e1e477d..4b54892 100644
--- a/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_column_view_test.js
+++ b/ambari-web/test/views/main/admin/stack_upgrade/upgrade_version_column_view_test.js
@@ -183,5 +183,11 @@ describe('App.UpgradeVersionColumnView', function () {
       view.set('content', versions[3]);
       expect(view.getNotUpgradable(false, true)).to.be.false;
     })
+
+    it ('Should return false for maint, when service is available and not upgradable while upgrading', function () {
+      view.set('content', versions[3]);
+      view.set('isUpgrading', true);
+      expect(view.getNotUpgradable(false, true)).to.be.false;
+    })
   })
 });


[23/50] [abbrv] ambari git commit: AMBARI-22085. Zeppelin start failed (aonishuk)

Posted by rl...@apache.org.
AMBARI-22085. Zeppelin start failed (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/63733bf2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/63733bf2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/63733bf2

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 63733bf29d19cef8a8e8f54fcfb151e4be04bdf5
Parents: b660bf1
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Sep 28 19:07:46 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Sep 28 19:07:46 2017 +0300

----------------------------------------------------------------------
 .pydevproject                                        |  5 +++++
 ambari-agent/.pydevproject                           | 14 ++++++++++++++
 ambari-server/.pydevproject                          |  5 +++++
 .../ZEPPELIN/0.7.0/package/scripts/master.py         | 15 +++++----------
 4 files changed, 29 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/63733bf2/.pydevproject
----------------------------------------------------------------------
diff --git a/.pydevproject b/.pydevproject
new file mode 100644
index 0000000..40e9f40
--- /dev/null
+++ b/.pydevproject
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?eclipse-pydev version="1.0"?><pydev_project>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
+</pydev_project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/63733bf2/ambari-agent/.pydevproject
----------------------------------------------------------------------
diff --git a/ambari-agent/.pydevproject b/ambari-agent/.pydevproject
new file mode 100644
index 0000000..c62d80f
--- /dev/null
+++ b/ambari-agent/.pydevproject
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?eclipse-pydev version="1.0"?><pydev_project>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
+<pydev_pathproperty name="org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH">
+<path>/home/user/ambari/ambari-common/src/test/python</path>
+<path>/home/user/ambari/ambari-common/src/main/python</path>
+<path>/home/user/ambari/ambari-common/src/main/python/ambari_jinja2</path>
+</pydev_pathproperty>
+<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
+<path>/${PROJECT_DIR_NAME}/src/main/python</path>
+<path>/${PROJECT_DIR_NAME}/src/main/python/ambari_agent</path>
+</pydev_pathproperty>
+</pydev_project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/63733bf2/ambari-server/.pydevproject
----------------------------------------------------------------------
diff --git a/ambari-server/.pydevproject b/ambari-server/.pydevproject
new file mode 100644
index 0000000..7ff1370
--- /dev/null
+++ b/ambari-server/.pydevproject
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?eclipse-pydev version="1.0"?><pydev_project>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
+</pydev_project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/63733bf2/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index af5758a..a450fb6 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -23,7 +23,6 @@ import os
 
 from resource_management.core import shell, sudo
 from resource_management.core.logger import Logger
-from resource_management.core.exceptions import Fail
 from resource_management.core.resources import Directory
 from resource_management.core.resources.system import Execute, File
 from resource_management.core.source import InlineTemplate
@@ -320,20 +319,17 @@ class Master(Script):
       and params.config['configurations']['zeppelin-config']['zeppelin.notebook.storage'] == 'org.apache.zeppelin.notebook.repo.FileSystemNotebookRepo':
 
       if 'zeppelin.config.fs.dir' in params.config['configurations']['zeppelin-config']:
-        try:
+        zeppelin_conf_fs = self.getZeppelinConfFS(params)
+        if os.path.exists(zeppelin_conf_fs):
           # copy from hdfs to /etc/zeppelin/conf/interpreter.json
           params.HdfsResource(interpreter_config,
                               type="file",
                               action="download_on_execute",
-                              source=self.getZeppelinConfFS(params),
-                              user=params.zeppelin_user,
+                              source=zeppelin_conf_fs,
                               group=params.zeppelin_group,
                               owner=params.zeppelin_user)
-        except Fail as fail:
-          if "doesn't exist" not in fail.args[0]:
-            print "Error getting interpreter.json from HDFS"
-            print fail.args
-            raise Fail
+        else:
+          Logger.info(format("{zeppelin_conf_fs} does not exist. Skipping upload of DFS."))
 
     config_content = sudo.read_file(interpreter_config)
     config_data = json.loads(config_content)
@@ -359,7 +355,6 @@ class Master(Script):
                             source=interpreter_config,
                             group=params.zeppelin_group,
                             owner=params.zeppelin_user,
-                            user=params.zeppelin_user,
                             replace_existing_files=True)
 
   def update_kerberos_properties(self):


[49/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-feature-AMBARI-20859

Posted by rl...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-admin/src/main/resources/ui/admin-web/app/scripts/i18n.config.js
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/java/org/apache/ambari/server/api/resources/ResourceInstanceFactoryImpl.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/java/org/apache/ambari/server/configuration/Configuration.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 8e30400,b2993e3..3955e81
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@@ -3596,38 -3683,22 +3609,7 @@@ public class AmbariManagementController
      return deleteStatusMetaData;
    }
  
-   private void deleteHostComponent(ServiceComponent serviceComponent, ServiceComponentHost componentHost) throws AmbariException {
-     String serviceName = serviceComponent.getServiceName();
-     String master_component_name = null;
-     String slave_component_name = componentHost.getServiceComponentName();
-     HostComponentAdminState desiredAdminState = componentHost.getComponentAdminState();
-     State slaveState = componentHost.getState();
-     //Delete hostcomponents
-     serviceComponent.deleteServiceComponentHosts(componentHost.getHostName());
-     // If deleted hostcomponents support decomission and were decommited and stopped or in unknown state
-     if (masterToSlaveMappingForDecom.containsValue(slave_component_name)
-             && desiredAdminState.equals(HostComponentAdminState.DECOMMISSIONED)
-             && (slaveState.equals(State.INSTALLED) || slaveState.equals(State.UNKNOWN))) {
-       for (Entry<String, String> entrySet : masterToSlaveMappingForDecom.entrySet()) {
-         if (entrySet.getValue().equals(slave_component_name)) {
-           master_component_name = entrySet.getKey();
-         }
-       }
- 
-       //Mark master component as needed to restart for remove host info from components UI
-       Cluster cluster = clusters.getCluster(serviceComponent.getClusterName());
-       Service service = cluster.getService(serviceName);
-       ServiceComponent sc = service.getServiceComponent(master_component_name);
- 
-       if (sc != null && sc.isMasterComponent()) {
-         for (ServiceComponentHost sch : sc.getServiceComponentHosts().values()) {
-           sch.setRestartRequired(true);
-         }
-       }
-     }
-   }
- 
    @Override
 -  public void deleteUsers(Set<UserRequest> requests)
 -    throws AmbariException {
 -
 -    for (UserRequest r : requests) {
 -      if (LOG.isDebugEnabled()) {
 -        LOG.debug("Received a delete user request, username={}", r.getUsername());
 -      }
 -      User u = users.getAnyUser(r.getUsername());
 -      if (null != u) {
 -        users.removeUser(u);
 -      }
 -    }
 -  }
 -
 -  @Override
    public void deleteGroups(Set<GroupRequest> requests) throws AmbariException {
      for (GroupRequest request: requests) {
        LOG.debug("Received a delete group request, groupname={}", request.getGroupName());

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProvider.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProvider.java
index b9bcff6,8678294..0823729
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProvider.java
@@@ -50,9 -52,11 +51,11 @@@ public class AmbariPamAuthenticationPro
    @Inject
    private Users users;
    @Inject
 -  protected UserDAO userDAO;
 +  private UserDAO userDAO;
    @Inject
 -  protected GroupDAO groupDAO;
 +  private GroupDAO groupDAO;
+   @Inject
+   private PamAuthenticationFactory pamAuthenticationFactory;
  
    private static final Logger LOG = LoggerFactory.getLogger(AmbariPamAuthenticationProvider.class);
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/Users.java
----------------------------------------------------------------------
diff --cc ambari-server/src/main/java/org/apache/ambari/server/security/authorization/Users.java
index d4eae9d,91cefe9..2dedc9e
--- a/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/Users.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/security/authorization/Users.java
@@@ -28,10 -27,11 +28,11 @@@ import java.util.List
  import java.util.Map;
  import java.util.Set;
  
 -import javax.inject.Inject;
  import javax.persistence.EntityManager;
 +import javax.persistence.OptimisticLockException;
  
  import org.apache.ambari.server.AmbariException;
+ import org.apache.ambari.server.configuration.Configuration;
  import org.apache.ambari.server.hooks.HookContextFactory;
  import org.apache.ambari.server.hooks.HookService;
  import org.apache.ambari.server.orm.dao.GroupDAO;
@@@ -57,9 -56,13 +58,11 @@@ import org.apache.ambari.server.securit
  import org.apache.commons.lang.StringUtils;
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
 -import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
 -import org.springframework.security.core.context.SecurityContext;
 -import org.springframework.security.core.context.SecurityContextHolder;
  import org.springframework.security.crypto.password.PasswordEncoder;
  
+ import com.google.common.base.Strings;
+ import com.google.common.collect.Sets;
 +import com.google.inject.Inject;
  import com.google.inject.Provider;
  import com.google.inject.Singleton;
  import com.google.inject.persist.Transactional;
@@@ -72,45 -75,34 +75,48 @@@ public class Users 
  
    private static final Logger LOG = LoggerFactory.getLogger(Users.class);
  
 +  /**
 +   * The maximum number of retries when handling OptimisticLockExceptions
 +   */
 +  private static final int MAX_RETRIES = 10;
 +
    @Inject
 -  Provider<EntityManager> entityManagerProvider;
 +  private Provider<EntityManager> entityManagerProvider;
 +
    @Inject
 -  protected UserDAO userDAO;
 +  private UserDAO userDAO;
 +
    @Inject
 -  protected GroupDAO groupDAO;
 +  private UserAuthenticationDAO userAuthenticationDAO;
 +
    @Inject
 -  protected MemberDAO memberDAO;
 +  private GroupDAO groupDAO;
 +
    @Inject
 -  protected PrincipalDAO principalDAO;
 +  private MemberDAO memberDAO;
 +
    @Inject
 -  protected PermissionDAO permissionDAO;
 +  private PrincipalDAO principalDAO;
 +
    @Inject
 -  protected PrivilegeDAO privilegeDAO;
 +  private PermissionDAO permissionDAO;
 +
    @Inject
 -  protected ResourceDAO resourceDAO;
 +  private PrivilegeDAO privilegeDAO;
 +
    @Inject
 -  protected ResourceTypeDAO resourceTypeDAO;
 +  private ResourceDAO resourceDAO;
 +
    @Inject
 -  protected PrincipalTypeDAO principalTypeDAO;
 +  private PrincipalTypeDAO principalTypeDAO;
 +
    @Inject
 -  protected PasswordEncoder passwordEncoder;
 +  private PasswordEncoder passwordEncoder;
 +
    @Inject
+   protected Configuration configuration;
 -  @Inject
 -  private AmbariLdapAuthenticationProvider ldapAuthenticationProvider;
+ 
+   @Inject
    private Provider<HookService> hookServiceProvider;
  
    @Inject
@@@ -287,29 -355,13 +293,29 @@@
    public synchronized void removeUser(User user) throws AmbariException {
      UserEntity userEntity = userDAO.findByPK(user.getUserId());
      if (userEntity != null) {
 +      removeUser(userEntity);
 +    } else {
 +      throw new AmbariException("User " + user + " doesn't exist");
 +    }
 +  }
 +
 +  /**
 +   * Removes a user from the Ambari database.
 +   * <p>
 +   * It is expected that the associated user authentication records are removed by this operation
 +   * as well.
 +   *
 +   * @param userEntity the user to remove
 +   * @throws AmbariException
 +   */
 +  @Transactional
 +  public synchronized void removeUser(UserEntity userEntity) throws AmbariException {
 +    if (userEntity != null) {
        if (!isUserCanBeRemoved(userEntity)) {
          throw new AmbariException("Could not remove user " + userEntity.getUserName() +
-             ". System should have at least one administrator.");
+           ". System should have at least one administrator.");
        }
        userDAO.remove(userEntity);
 -    } else {
 -      throw new AmbariException("User " + user + " doesn't exist");
      }
    }
  
@@@ -665,38 -652,21 +671,38 @@@
        allGroups.put(groupEntity.getGroupName(), groupEntity);
      }
  
 -    final PrincipalTypeEntity userPrincipalType = principalTypeDAO
 -      .ensurePrincipalTypeCreated(PrincipalTypeEntity.USER_PRINCIPAL_TYPE);
      final PrincipalTypeEntity groupPrincipalType = principalTypeDAO
-         .ensurePrincipalTypeCreated(PrincipalTypeEntity.GROUP_PRINCIPAL_TYPE);
+       .ensurePrincipalTypeCreated(PrincipalTypeEntity.GROUP_PRINCIPAL_TYPE);
  
 -    // remove users
 +    /* *****
 +     * Remove users
 +     *   First remove the relevant LDAP entries for this user.
 +     *   If no more user authentication items exists for the user, then remove the user.
 +     * ***** */
      final Set<UserEntity> usersToRemove = new HashSet<>();
 +    final Set<UserAuthenticationEntity> authenticationEntitiesToRemove = new HashSet<>();
      for (String userName : batchInfo.getUsersToBeRemoved()) {
        UserEntity userEntity = userDAO.findUserByName(userName);
 -      if (userEntity == null) {
 -        continue;
 +      if (userEntity != null) {
 +        List<UserAuthenticationEntity> authenticationEntities = userEntity.getAuthenticationEntities();
 +        Iterator<UserAuthenticationEntity> iterator = authenticationEntities.iterator();
 +        while (iterator.hasNext()) {
 +          UserAuthenticationEntity authenticationEntity = iterator.next();
 +
 +          if (authenticationEntity.getAuthenticationType() == UserAuthenticationType.LDAP) {
 +            // TODO: Determine if this is the _relevant_ LDAP authentication entry - for now there will only be one..
 +            authenticationEntitiesToRemove.add(authenticationEntity);
 +            iterator.remove();
 +          }
 +        }
 +
 +        if (authenticationEntities.isEmpty()) {
 +          allUsers.remove(userEntity.getUserName());
 +          usersToRemove.add(userEntity);
 +        }
        }
 -      allUsers.remove(userEntity.getUserName());
 -      usersToRemove.add(userEntity);
      }
 +    userAuthenticationDAO.remove(authenticationEntitiesToRemove);
      userDAO.remove(usersToRemove);
  
      // remove groups
@@@ -830,8 -776,46 +840,40 @@@
  
      // clear cached entities
      entityManagerProvider.get().getEntityManagerFactory().getCache().evictAll();
 -
 -    if (!usersToCreate.isEmpty()) {
 -      // entry point in the hook logic
 -      hookServiceProvider.get().execute(hookContextFactory.createBatchUserHookContext(getUsersToGroupMap(usersToCreate)));
 -    }
 -
    }
  
+   private void processLdapAdminGroupMappingRules(Set<MemberEntity> membershipsToCreate) {
+ 
+     String adminGroupMappings = configuration.getProperty(Configuration.LDAP_ADMIN_GROUP_MAPPING_RULES);
+     if (Strings.isNullOrEmpty(adminGroupMappings) || membershipsToCreate.isEmpty()) {
+       LOG.info("Nothing to do. LDAP admin group mappings: {}, Memberships to handle: {}", adminGroupMappings, membershipsToCreate.size());
+       return;
+     }
+ 
+     LOG.info("Processing admin group mapping rules [{}]. Membership entry count: [{}]", adminGroupMappings, membershipsToCreate.size());
+ 
+     // parse the comma separated list of mapping rules
+     Set<String> ldapAdminGroups = Sets.newHashSet(adminGroupMappings.split(","));
+ 
+     // LDAP users to become ambari administrators
+     Set<UserEntity> ambariAdminProspects = Sets.newHashSet();
+ 
+     // gathering all the users that need to be ambari admins
+     for (MemberEntity memberEntity : membershipsToCreate) {
+       if (ldapAdminGroups.contains(memberEntity.getGroup().getGroupName())) {
+         LOG.debug("Ambari admin user prospect: [{}] ", memberEntity.getUser().getUserName());
+         ambariAdminProspects.add(memberEntity.getUser());
+       }
+     }
+ 
+     // granting admin privileges to the admin prospects
+     for (UserEntity userEntity : ambariAdminProspects) {
+       LOG.info("Granting ambari admin roles to the user: {}", userEntity.getUserName());
+       grantAdminPrivilege(userEntity.getUserId());
+     }
+ 
+   }
+ 
    /**
     * Assembles a map where the keys are usernames and values are Lists with groups associated with users.
     *
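
processLdapAdminGroupMappingRules above boils down to two steps: split the LDAP_ADMIN_GROUP_MAPPING_RULES value on commas into a set of group names, then promote every synced user whose new membership lands in one of those groups. A self-contained sketch of that selection logic (the group and user names below are made up for illustration, and memberships are abstracted to a plain map rather than MemberEntity objects):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class AdminGroupMapping {
      // Mirrors the comma-split of the mapping rules property (no trimming).
      static Set<String> parseRules(String adminGroupMappings) {
        Set<String> rules = new HashSet<>();
        if (adminGroupMappings != null && !adminGroupMappings.isEmpty()) {
          rules.addAll(Arrays.asList(adminGroupMappings.split(",")));
        }
        return rules;
      }

      // From (group -> members) memberships, collect the users to promote.
      static Set<String> adminProspects(Map<String, Set<String>> memberships,
                                        Set<String> ldapAdminGroups) {
        Set<String> prospects = new HashSet<>();
        for (Map.Entry<String, Set<String>> entry : memberships.entrySet()) {
          if (ldapAdminGroups.contains(entry.getKey())) {
            prospects.addAll(entry.getValue());
          }
        }
        return prospects;
      }

      public static void main(String[] args) {
        Map<String, Set<String>> memberships = new HashMap<>();
        memberships.put("ambari-admins", new HashSet<>(Arrays.asList("alice")));
        memberships.put("dev", new HashSet<>(Arrays.asList("bob")));
        // Prints [alice]: only members of a mapped group are promoted.
        System.out.println(adminProspects(memberships, parseRules("ambari-admins,ops")));
      }
    }
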

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ActiveWidgetLayoutResourceProviderTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProviderTest.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProviderTest.java
index ea981e2,36f6a1e..1eea11c
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/GroupPrivilegeResourceProviderTest.java
@@@ -57,7 -52,6 +57,8 @@@ import org.apache.ambari.server.securit
  import org.apache.ambari.server.security.authorization.AuthorizationException;
  import org.apache.ambari.server.security.authorization.ResourceType;
  import org.apache.ambari.server.security.authorization.Users;
++import org.apache.ambari.server.state.stack.OsFamily;
 +import org.easymock.EasyMockSupport;
  import org.junit.Test;
  import org.springframework.security.core.Authentication;
  import org.springframework.security.core.context.SecurityContextHolder;
@@@ -339,31 -328,8 +340,32 @@@ public class GroupPrivilegeResourceProv
      final ResourceTypeEntity resourceTypeEntity = createNiceMock(ResourceTypeEntity.class);
      final PrivilegeDAO privilegeDAO = createMock(PrivilegeDAO.class);
  
 -    final TestUsers users = new TestUsers();
 -    users.setPrivilegeDAO(privilegeDAO);
 +    final Injector injector = Guice.createInjector(new AbstractModule() {
 +                                                     @Override
 +                                                     protected void configure() {
++                                                       bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
 +                                                       bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
 +                                                       bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
 +                                                       bind(PasswordEncoder.class).toInstance(createNiceMock(PasswordEncoder.class));
 +                                                       bind(HookService.class).toInstance(createMock(HookService.class));
 +                                                       bind(HookContextFactory.class).toInstance(createMock(HookContextFactory.class));
 +
 +                                                       bind(GroupDAO.class).toInstance(groupDAO);
 +                                                       bind(ClusterDAO.class).toInstance(clusterDAO);
 +                                                       bind(ViewInstanceDAO.class).toInstance(viewInstanceDAO);
 +                                                       bind(GroupEntity.class).toInstance(groupEntity);
 +                                                       bind(PrincipalEntity.class).toInstance(principalEntity);
 +                                                       bind(PrivilegeEntity.class).toInstance(privilegeEntity);
 +                                                       bind(PermissionEntity.class).toInstance(permissionEntity);
 +                                                       bind(PrincipalTypeEntity.class).toInstance(principalTypeEntity);
 +                                                       bind(ResourceEntity.class).toInstance(resourceEntity);
 +                                                       bind(ResourceTypeEntity.class).toInstance(resourceTypeEntity);
 +                                                       bind(PrivilegeDAO.class).toInstance(privilegeDAO);
 +                                                     }
 +                                                   }
 +    );
 +
 +    final Users users = injector.getInstance(Users.class);
  
      List<PrincipalEntity> groupPrincipals = new LinkedList<>();
      groupPrincipals.add(principalEntity);

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RequestImplTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserPrivilegeResourceProviderTest.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserPrivilegeResourceProviderTest.java
index 499354f,54726df..35b8234
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserPrivilegeResourceProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/UserPrivilegeResourceProviderTest.java
@@@ -61,7 -55,6 +60,8 @@@ import org.apache.ambari.server.securit
  import org.apache.ambari.server.security.authorization.AuthorizationException;
  import org.apache.ambari.server.security.authorization.ResourceType;
  import org.apache.ambari.server.security.authorization.Users;
++import org.apache.ambari.server.state.stack.OsFamily;
 +import org.easymock.EasyMockSupport;
  import org.junit.Test;
  import org.springframework.security.core.Authentication;
  import org.springframework.security.core.context.SecurityContextHolder;
@@@ -404,13 -381,23 +404,13 @@@ public class UserPrivilegeResourceProvi
      expect(userEntity.getUserName()).andReturn("jdoe").atLeastOnce();
      expect(userEntity.getPrincipal()).andReturn(principalEntity).atLeastOnce();
  
 -    ClusterDAO clusterDAO = createMock(ClusterDAO.class);
 -    GroupDAO groupDAO = createMock(GroupDAO.class);
 -
 -    ViewInstanceDAO viewInstanceDAO = createMock(ViewInstanceDAO.class);
      expect(viewInstanceDAO.findByResourceId(1L)).andReturn(viewInstanceEntity).atLeastOnce();
  
 -    final UserDAO userDAO = createNiceMock(UserDAO.class);
 -    expect(userDAO.findLocalUserByName("jdoe")).andReturn(userEntity).anyTimes();
 +    expect(userDAO.findUserByName("jdoe")).andReturn(userEntity).anyTimes();
      expect(userDAO.findUserByPrincipal(anyObject(PrincipalEntity.class))).andReturn(userEntity).anyTimes();
-     expect(userDAO.findAll()).andReturn(Collections.<UserEntity>emptyList()).anyTimes();
+     expect(userDAO.findAll()).andReturn(Collections.emptyList()).anyTimes();
  
 -    final PrivilegeDAO privilegeDAO = createMock(PrivilegeDAO.class);
 -    final MemberDAO memberDAO = createMock(MemberDAO.class);
 -
 -    final TestUsers users = new TestUsers();
 -    users.setPrivilegeDAO(privilegeDAO);
 -    users.setMemberDAO(memberDAO);
 +    final Users users = injector.getInstance(Users.class);
  
      List<PrincipalEntity> rolePrincipals = new LinkedList<>();
      rolePrincipals.add(rolePrincipalEntity);
@@@ -482,12 -468,12 +482,12 @@@
          andReturn(Collections.singletonList(privilegeEntity))
          .atLeastOnce();
      expect(memberDAO.findAllMembersByUser(userEntity)).
-         andReturn(Collections.<MemberEntity>emptyList())
+         andReturn(Collections.emptyList())
          .atLeastOnce();
 -    expect(userDAO.findLocalUserByName(requestedUsername)).andReturn(userEntity).anyTimes();
+     expect(userDAO.findAll()).andReturn(Collections.emptyList()).anyTimes();
 +    expect(userDAO.findUserByName(requestedUsername)).andReturn(userEntity).anyTimes();
-     expect(userDAO.findAll()).andReturn(Collections.<UserEntity>emptyList()).anyTimes();
      expect(userEntity.getPrincipal()).andReturn(principalEntity).anyTimes();
-     expect(userEntity.getMemberEntities()).andReturn(Collections.<MemberEntity>emptySet()).anyTimes();
+     expect(userEntity.getMemberEntities()).andReturn(Collections.emptySet()).anyTimes();
      expect(privilegeEntity.getPermission()).andReturn(permissionEntity).anyTimes();
      expect(privilegeEntity.getPrincipal()).andReturn(principalEntity).anyTimes();
      expect(principalEntity.getPrincipalType()).andReturn(principalTypeEntity).anyTimes();
@@@ -531,24 -517,4 +531,25 @@@
      verifyAll();
    }
  
 +  private Injector createInjector() {
 +    return Guice.createInjector(new AbstractModule() {
 +      @Override
 +      protected void configure() {
++        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
 +        bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
 +        bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
 +        bind(PasswordEncoder.class).toInstance(createNiceMock(PasswordEncoder.class));
 +        bind(HookService.class).toInstance(createMock(HookService.class));
 +        bind(HookContextFactory.class).toInstance(createMock(HookContextFactory.class));
 +
 +        bind(UserDAO.class).toInstance(createNiceMock(UserDAO.class));
 +        bind(GroupDAO.class).toInstance(createNiceMock(GroupDAO.class));
 +        bind(ClusterDAO.class).toInstance(createNiceMock(ClusterDAO.class));
 +        bind(ViewInstanceDAO.class).toInstance(createNiceMock(ViewInstanceDAO.class));
 +        bind(PrivilegeDAO.class).toInstance(createMock(PrivilegeDAO.class));
 +        bind(MemberDAO.class).toInstance(createMock(MemberDAO.class));
 +      }
 +    });
 +  }
 +
  }
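
Both privilege-provider tests now obtain Users from a throwaway Guice injector whose module binds a mock for every constructor dependency, replacing the removed TestUsers subclass. Reduced to a self-contained sketch of the pattern (Dependency and ServiceUnderTest are illustrative types, with Guice assumed on the classpath):

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Inject;
    import com.google.inject.Injector;

    // Illustrative collaborator; the tests bind EasyMock mocks instead.
    interface Dependency {
      String value();
    }

    // Illustrative class under test, constructed by Guice via injection.
    class ServiceUnderTest {
      private final Dependency dependency;

      @Inject
      ServiceUnderTest(Dependency dependency) {
        this.dependency = dependency;
      }

      String describe() {
        return "dep=" + dependency.value();
      }
    }

    class InjectorSketch {
      public static void main(String[] args) {
        Injector injector = Guice.createInjector(new AbstractModule() {
          @Override
          protected void configure() {
            // Where the tests call bind(X.class).toInstance(createNiceMock(X.class))
            bind(Dependency.class).toInstance(() -> "stub");
          }
        });
        System.out.println(injector.getInstance(ServiceUnderTest.class).describe()); // dep=stub
      }
    }

Binding concrete instances keeps the object graph wired exactly as in production while every edge remains controllable from the test.
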

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/test/java/org/apache/ambari/server/orm/OrmTestHelper.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariAuthorizationFilterTest.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProviderTest.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProviderTest.java
index 1145954,9cfd148..4e080b1
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProviderTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/AmbariPamAuthenticationProviderTest.java
@@@ -17,23 -17,24 +17,29 @@@
   */
  package org.apache.ambari.server.security.authorization;
  
- import static org.easymock.EasyMock.createNiceMock;
+ import static org.easymock.EasyMock.anyObject;
+ import static org.easymock.EasyMock.eq;
  import static org.easymock.EasyMock.expect;
+ import static org.easymock.EasyMock.expectLastCall;
  
- import java.util.Collection;
  import java.util.Collections;
  
- import org.apache.ambari.server.H2DatabaseCleaner;
- import org.apache.ambari.server.audit.AuditLoggerModule;
+ import javax.persistence.EntityManager;
+ 
  import org.apache.ambari.server.configuration.Configuration;
- import org.apache.ambari.server.orm.GuiceJpaInitializer;
+ import org.apache.ambari.server.hooks.HookContextFactory;
+ import org.apache.ambari.server.hooks.HookService;
+ import org.apache.ambari.server.orm.DBAccessor;
++import org.apache.ambari.server.orm.dao.MemberDAO;
++import org.apache.ambari.server.orm.dao.PrivilegeDAO;
  import org.apache.ambari.server.orm.dao.UserDAO;
 +import org.apache.ambari.server.orm.entities.PrincipalEntity;
 +import org.apache.ambari.server.orm.entities.UserAuthenticationEntity;
 +import org.apache.ambari.server.orm.entities.UserEntity;
  import org.apache.ambari.server.security.ClientSecurityType;
- import org.easymock.EasyMock;
- import org.junit.After;
+ import org.apache.ambari.server.security.authentication.pam.PamAuthenticationFactory;
+ import org.apache.ambari.server.state.stack.OsFamily;
+ import org.easymock.EasyMockSupport;
  import org.junit.Before;
  import org.junit.Test;
  import org.jvnet.libpam.PAM;
@@@ -47,22 -52,32 +57,34 @@@ import com.google.inject.Injector
  
  import junit.framework.Assert;
  
- public class AmbariPamAuthenticationProviderTest {
- 
-   private static Injector injector;
- 
-   @Inject
-   private AmbariPamAuthenticationProvider authenticationProvider;
-   @Inject
-   private Configuration configuration;
+ public class AmbariPamAuthenticationProviderTest extends EasyMockSupport {
  
    private static final String TEST_USER_NAME = "userName";
+   private static final String TEST_USER_PASS = "userPass";
+   private static final String TEST_USER_INCORRECT_PASS = "userIncorrectPass";
+ 
+   private Injector injector;
  
    @Before
-   public void setUp() {
-     injector = Guice.createInjector(new AuditLoggerModule(), new AuthorizationTestModule());
-     injector.injectMembers(this);
-     injector.getInstance(GuiceJpaInitializer.class);
+   public void setup() {
+     injector = Guice.createInjector(new AbstractModule() {
+ 
+       @Override
+       protected void configure() {
+         bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class));
+         bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class));
+         bind(HookContextFactory.class).toInstance(createNiceMock(HookContextFactory.class));
+         bind(HookService.class).toInstance(createNiceMock(HookService.class));
+         bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
+         bind(UserDAO.class).toInstance(createNiceMock(UserDAO.class));
++        bind(MemberDAO.class).toInstance(createNiceMock(MemberDAO.class));
++        bind(PrivilegeDAO.class).toInstance(createNiceMock(PrivilegeDAO.class));
+         bind(PamAuthenticationFactory.class).toInstance(createMock(PamAuthenticationFactory.class));
+         bind(PasswordEncoder.class).toInstance(new StandardPasswordEncoder());
+       }
+     });
+ 
+     Configuration configuration = injector.getInstance(Configuration.class);
      configuration.setClientSecurityType(ClientSecurityType.PAM);
      configuration.setProperty(Configuration.PAM_CONFIGURATION_FILE, "ambari-pam");
    }
@@@ -83,23 -107,33 +114,35 @@@
  
    @Test
    public void testAuthenticate() throws Exception {
-     PAM pam = createNiceMock(PAM.class);
+ 
      UnixUser unixUser = createNiceMock(UnixUser.class);
 -    expect(unixUser.getGroups()).andReturn(Collections.singleton("group")).atLeastOnce();
 +    expect(unixUser.getUserName()).andReturn(TEST_USER_NAME).atLeastOnce();
+ 
+     PAM pam = createMock(PAM.class);
 -    expect(pam.authenticate(eq(TEST_USER_NAME), eq(TEST_USER_PASS)))
 -        .andReturn(unixUser)
 -        .once();
 -    pam.dispose();
 -    expectLastCall().once();
++    expect(pam.authenticate(eq(TEST_USER_NAME), eq(TEST_USER_PASS))).andReturn(unixUser).once();
+ 
 -    PamAuthenticationFactory pamAuthenticationFactory = injector.getInstance(PamAuthenticationFactory.class);
 -    expect(pamAuthenticationFactory.createInstance(anyObject(String.class))).andReturn(pam).once();
 +    UserEntity userEntity = combineUserEntity();
-     User user = new User(userEntity);
-     UserDAO userDAO = createNiceMock(UserDAO.class);
-     Collection<AmbariGrantedAuthority> userAuthorities = Collections.singletonList(createNiceMock(AmbariGrantedAuthority.class));
-     expect(pam.authenticate(EasyMock.anyObject(String.class), EasyMock.anyObject(String.class))).andReturn(unixUser).atLeastOnce();
-     expect(unixUser.getGroups()).andReturn(Collections.singleton("group")).atLeastOnce();
-     EasyMock.replay(unixUser);
-     EasyMock.replay(pam);
-     Authentication authentication = new AmbariUserAuthentication("userPass", user, userAuthorities);
-     Authentication result = authenticationProvider.authenticateViaPam(pam,authentication);
-     expect(userDAO.findUserByName("userName")).andReturn(null).once();
+ 
 -    replayAll();
++    UserDAO userDAO = injector.getInstance(UserDAO.class);
++    expect(userDAO.findUserByName(TEST_USER_NAME)).andReturn(userEntity).once();
+ 
 -    Authentication authentication = new UsernamePasswordAuthenticationToken(TEST_USER_NAME, TEST_USER_PASS);
++    MemberDAO memberDAO = injector.getInstance(MemberDAO.class);
++    expect(memberDAO.findAllMembersByUser(userEntity)).andReturn(Collections.emptyList()).once();
+ 
 -    AmbariPamAuthenticationProvider authenticationProvider = injector.getInstance(AmbariPamAuthenticationProvider.class);
++    PrivilegeDAO privilegeDAO = injector.getInstance(PrivilegeDAO.class);
++    expect(privilegeDAO.findAllByPrincipal(anyObject())).andReturn(Collections.emptyList()).once();
+ 
 -    Authentication result = authenticationProvider.authenticate(authentication);
++    replayAll();
+ 
 -    verifyAll();
++    Authentication authentication = new UsernamePasswordAuthenticationToken(TEST_USER_NAME, TEST_USER_PASS);
++    AmbariPamAuthenticationProvider authenticationProvider = injector.getInstance(AmbariPamAuthenticationProvider.class);
+ 
++    Authentication result = authenticationProvider.authenticateViaPam(pam, authentication);
      Assert.assertNotNull(result);
      Assert.assertEquals(true, result.isAuthenticated());
      Assert.assertTrue(result instanceof AmbariUserAuthentication);
++
++    verifyAll();
    }
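
The rewritten testAuthenticate above also moves to the canonical EasyMock lifecycle that EasyMockSupport provides: record expectations on the mocks, replayAll(), exercise the code under test, then verifyAll(). A self-contained sketch of that lifecycle (Greeter is an illustrative interface, not an Ambari type):

    import static org.easymock.EasyMock.expect;

    import org.easymock.EasyMockSupport;

    // Illustrative collaborator standing in for PAM/UserDAO-style mocks.
    interface Greeter {
      String greet(String name);
    }

    class LifecycleSketch extends EasyMockSupport {
      void run() {
        Greeter greeter = createMock(Greeter.class);
        expect(greeter.greet("userName")).andReturn("hello userName").once(); // record
        replayAll();                                 // switch every mock to replay mode
        String result = greeter.greet("userName");   // exercise the collaborator
        if (!"hello userName".equals(result)) {
          throw new AssertionError("unexpected: " + result);
        }
        verifyAll();                                 // fail if any expectation went unmet
      }

      public static void main(String[] args) {
        new LifecycleSketch().run();
      }
    }
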
  
    @Test

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/UsersTest.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/security/authorization/UsersTest.java
index ffa68fa,29c21a7..4283788
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/UsersTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/authorization/UsersTest.java
@@@ -161,11 -161,12 +161,11 @@@ public class UsersTest extends EasyMock
    @Test(expected = AmbariException.class)
    public void testCreateUser_Duplicate() throws Exception {
      UserEntity existing = new UserEntity();
 -    existing.setUserName(UserName.fromString(SERVICEOP_USER_NAME));
 -    existing.setUserType(UserType.LDAP);
 +    existing.setUserName(UserName.fromString(SERVICEOP_USER_NAME).toString());
      existing.setUserId(1);
-     existing.setMemberEntities(Collections.<MemberEntity>emptySet());
+     existing.setMemberEntities(Collections.emptySet());
      PrincipalEntity principal = new PrincipalEntity();
-     principal.setPrivileges(Collections.<PrivilegeEntity>emptySet());
+     principal.setPrivileges(Collections.emptySet());
      existing.setPrincipal(principal);
      initForCreateUser(existing);
  

http://git-wip-us.apache.org/repos/asf/ambari/blob/17243c68/ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java
----------------------------------------------------------------------
diff --cc ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java
index cf05425,afd6f6d..34bbe97
--- a/ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/security/ldap/AmbariLdapDataPopulatorTest.java
@@@ -52,8 -52,6 +52,7 @@@ import org.apache.ambari.server.configu
  import org.apache.ambari.server.orm.entities.GroupEntity;
  import org.apache.ambari.server.orm.entities.MemberEntity;
  import org.apache.ambari.server.orm.entities.PrincipalEntity;
- import org.apache.ambari.server.orm.entities.PrivilegeEntity;
 +import org.apache.ambari.server.orm.entities.UserAuthenticationEntity;
  import org.apache.ambari.server.orm.entities.UserEntity;
  import org.apache.ambari.server.security.authorization.AmbariLdapUtils;
  import org.apache.ambari.server.security.authorization.Group;
@@@ -1973,15 -1970,14 +1972,15 @@@ public class AmbariLdapDataPopulatorTes
    private User createUser(String name, boolean ldapUser, GroupEntity group) {
      final UserEntity userEntity = new UserEntity();
      userEntity.setUserId(userIdCounter++);
 -    userEntity.setUserName(UserName.fromString(name));
 +    userEntity.setUserName(UserName.fromString(name).toString());
      userEntity.setCreateTime(new Date());
 -    userEntity.setLdapUser(ldapUser);
      userEntity.setActive(true);
-     userEntity.setMemberEntities(new HashSet<MemberEntity>());
+     userEntity.setMemberEntities(new HashSet<>());
 +
      final PrincipalEntity principalEntity = new PrincipalEntity();
-     principalEntity.setPrivileges(new HashSet<PrivilegeEntity>());
+     principalEntity.setPrivileges(new HashSet<>());
      userEntity.setPrincipal(principalEntity);
 +
      if (group != null) {
        final MemberEntity member = new MemberEntity();
        member.setUser(userEntity);


[16/50] [abbrv] ambari git commit: AMBARI-22020. Setup tez ats loglevel to TASK_ATTEMPT to enable the filters. (Harish Jaiprakash via Swapan Shridhar)

Posted by rl...@apache.org.
AMBARI-22020. Setup tez ats loglevel to TASK_ATTEMPT to enable the filters. (Harish Jaiprakash via Swapan Shridhar)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/40e712a1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/40e712a1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/40e712a1

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 40e712a187bf36f49dfed584c923df271cca522d
Parents: f4e4fae
Author: Swapan Shridhar <ss...@hortonworks.com>
Authored: Wed Sep 27 10:10:40 2017 -0700
Committer: Swapan Shridhar <ss...@hortonworks.com>
Committed: Wed Sep 27 10:10:40 2017 -0700

----------------------------------------------------------------------
 .../2.6/services/HIVE/configuration/tez-interactive-site.xml   | 6 ++++++
 1 file changed, 6 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/40e712a1/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
index 2c9b272..4842122 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/HIVE/configuration/tez-interactive-site.xml
@@ -75,6 +75,12 @@
     <on-ambari-upgrade add="true"/>
   </property>
   <property>
+    <name>tez.history.logging.log.level</name>
+    <value>TASK_ATTEMPT</value>
+    <description>Set the log level to TASK_ATTEMPT.</description>
+    <on-ambari-upgrade add="false"/>
+  </property>
+  <property>
     <name>tez.history.logging.taskattempt-filters</name>
     <value>SERVICE_BUSY,EXTERNAL_PREEMPTION</value>
     <description>TASK_ATTEMPT events to be ignored.</description>


[35/50] [abbrv] ambari git commit: AMBARI-22099. Patch Upgrade UI Change: Exclamation Icon's Center shouldn't be transparent

Posted by rl...@apache.org.
AMBARI-22099. Patch Upgrade UI Change: Exclamation Icon's Center shouldn't be transparent


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f69ccd89
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f69ccd89
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f69ccd89

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: f69ccd890bdfd0e4df78fc8c52f8799d7a61b03b
Parents: 510a262
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Fri Sep 29 21:40:02 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Fri Sep 29 21:40:02 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/styles/stack_versions.less | 11 +++++++++++
 1 file changed, 11 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f69ccd89/ambari-web/app/styles/stack_versions.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/stack_versions.less b/ambari-web/app/styles/stack_versions.less
index 28a3239..f47e3fc 100644
--- a/ambari-web/app/styles/stack_versions.less
+++ b/ambari-web/app/styles/stack_versions.less
@@ -124,10 +124,13 @@
 }
 .version-box-popup {
   .modal{
+    overflow: visible;
     .modal-dialog{
       width: 340px;
     }
     .modal-body {
+      padding: 0px 0px;
+      overflow: visible;
       padding: 0;
     }
   }
@@ -189,6 +192,14 @@
       left: -8px;
       color: orange;
       z-index: 3;
+      height: 24px;
+      width: 24px;
+      border-radius: 24px;
+      background-color: #fff;
+      &:before{
+        position: relative;
+        top:-4px;
+      }
     }
     .state {
       margin: 15px 0;


[15/50] [abbrv] ambari git commit: AMBARI-22069.Zeppelin JDBC queries should be split by default(Prabhjyot Singh via Venkata Sairam)

Posted by rl...@apache.org.
AMBARI-22069.Zeppelin JDBC queries should be split by default(Prabhjyot Singh via Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f4e4fae8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f4e4fae8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f4e4fae8

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: f4e4fae8ef085a7e646ea0c5f986b00f10cfdfdc
Parents: 407eb54
Author: Venkata Sairam <ve...@gmail.com>
Authored: Wed Sep 27 19:46:56 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Wed Sep 27 19:46:56 2017 +0530

----------------------------------------------------------------------
 .../ZEPPELIN/0.7.0/package/scripts/master.py           | 13 +++++++++++++
 .../python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py    |  6 +++---
 2 files changed, 16 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f4e4fae8/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
index 8bdfaec..af5758a 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/master.py
@@ -326,6 +326,7 @@ class Master(Script):
                               type="file",
                               action="download_on_execute",
                               source=self.getZeppelinConfFS(params),
+                              user=params.zeppelin_user,
                               group=params.zeppelin_group,
                               owner=params.zeppelin_user)
         except Fail as fail:
@@ -456,6 +457,9 @@ class Master(Script):
             interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
                                                  params.hive_server_host + \
                                                      ':' + params.hive_server_port
+          if 'hive.splitQueries' not in interpreter['properties']:
+            interpreter['properties']["hive.splitQueries"] = "true"
+
         if params.hive_server_interactive_hosts:
           interpreter['properties'][hive_interactive_properties_key + '.driver'] = 'org.apache.hive.jdbc.HiveDriver'
           interpreter['properties'][hive_interactive_properties_key + '.user'] = 'hive'
@@ -470,6 +474,8 @@ class Master(Script):
             interpreter['properties'][hive_interactive_properties_key + '.url'] = 'jdbc:hive2://' + \
                                                     params.hive_server_interactive_hosts + \
                                                     ':' + params.hive_server_port
+          if hive_interactive_properties_key + '.splitQueries' not in interpreter['properties']:
+            interpreter['properties'][hive_interactive_properties_key + '.splitQueries'] = "true"
 
         if params.spark_thrift_server_hosts:
           interpreter['properties']['spark.driver'] = 'org.apache.hive.jdbc.HiveDriver'
@@ -480,6 +486,8 @@ class Master(Script):
               params.spark_thrift_server_hosts + ':' + params.spark_hive_thrift_port + '/'
           if params.spark_hive_principal:
             interpreter['properties']['spark.url'] += ';principal=' + params.spark_hive_principal
+          if 'spark.splitQueries' not in interpreter['properties']:
+            interpreter['properties']['spark.splitQueries'] = "true"
 
         if params.spark2_thrift_server_hosts:
           interpreter['properties']['spark2.driver'] = 'org.apache.hive.jdbc.HiveDriver'
@@ -490,6 +498,8 @@ class Master(Script):
               params.spark2_thrift_server_hosts + ':' + params.spark2_hive_thrift_port + '/'
           if params.spark_hive_principal:
             interpreter['properties']['spark2.url'] += ';principal=' + params.spark2_hive_principal
+          if 'spark2.splitQueries' not in interpreter['properties']:
+            interpreter['properties']['spark2.splitQueries'] = "true"
 
         if params.zookeeper_znode_parent \
                 and params.hbase_zookeeper_quorum:
@@ -500,6 +510,9 @@ class Master(Script):
             interpreter['properties']['phoenix.url'] = "jdbc:phoenix:" + \
                                                     params.hbase_zookeeper_quorum + ':' + \
                                                     params.zookeeper_znode_parent
+            if 'phoenix.splitQueries' not in interpreter['properties']:
+              interpreter['properties']['phoenix.splitQueries'] = "true"
+
 
       elif interpreter['group'] == 'livy' and interpreter['name'] == 'livy':
         if params.livy_livyserver_host:

http://git-wip-us.apache.org/repos/asf/ambari/blob/f4e4fae8/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index 1fdbadb..5ae3739 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -312,7 +312,7 @@ class TestZeppelin070(RMFTestCase):
                               hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hdfs_site={u'a': u'b'},
                               kinit_path_local='/usr/bin/kinit',
-                              user="hdfs",
+                              user="zeppelin",
                               owner="zeppelin",
                               group="zeppelin",
                               type='file',
@@ -357,7 +357,7 @@ class TestZeppelin070(RMFTestCase):
                               hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hdfs_site={u'a': u'b'},
                               kinit_path_local='/usr/bin/kinit',
-                              user="hdfs",
+                              user="zeppelin",
                               owner="zeppelin",
                               group="zeppelin",
                               type='file',
@@ -402,7 +402,7 @@ class TestZeppelin070(RMFTestCase):
                               hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
                               hdfs_site={u'a': u'b'},
                               kinit_path_local='/usr/bin/kinit',
-                              user="hdfs",
+                              user="zeppelin",
                               owner="zeppelin",
                               group="zeppelin",
                               type='file',


[24/50] [abbrv] ambari git commit: AMBARI-22085. Zeppelin start failed (aonishuk)

Posted by rl...@apache.org.
AMBARI-22085. Zeppelin start failed (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fb4115e2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fb4115e2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fb4115e2

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: fb4115e27eda3591d944bd7adb6c0fb449538a2a
Parents: 63733bf
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Thu Sep 28 19:50:57 2017 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Thu Sep 28 19:50:57 2017 +0300

----------------------------------------------------------------------
 .pydevproject                                   |   5 -
 ambari-agent/.pydevproject                      |  14 --
 ambari-server/.pydevproject                     |   5 -
 .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py    | 216 +++++++++----------
 4 files changed, 105 insertions(+), 135 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fb4115e2/.pydevproject
----------------------------------------------------------------------
diff --git a/.pydevproject b/.pydevproject
deleted file mode 100644
index 40e9f40..0000000
--- a/.pydevproject
+++ /dev/null
@@ -1,5 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<?eclipse-pydev version="1.0"?><pydev_project>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
-</pydev_project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb4115e2/ambari-agent/.pydevproject
----------------------------------------------------------------------
diff --git a/ambari-agent/.pydevproject b/ambari-agent/.pydevproject
deleted file mode 100644
index c62d80f..0000000
--- a/ambari-agent/.pydevproject
+++ /dev/null
@@ -1,14 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<?eclipse-pydev version="1.0"?><pydev_project>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
-<pydev_pathproperty name="org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH">
-<path>/home/user/ambari/ambari-common/src/test/python</path>
-<path>/home/user/ambari/ambari-common/src/main/python</path>
-<path>/home/user/ambari/ambari-common/src/main/python/ambari_jinja2</path>
-</pydev_pathproperty>
-<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
-<path>/${PROJECT_DIR_NAME}/src/main/python</path>
-<path>/${PROJECT_DIR_NAME}/src/main/python/ambari_agent</path>
-</pydev_pathproperty>
-</pydev_project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb4115e2/ambari-server/.pydevproject
----------------------------------------------------------------------
diff --git a/ambari-server/.pydevproject b/ambari-server/.pydevproject
deleted file mode 100644
index 7ff1370..0000000
--- a/ambari-server/.pydevproject
+++ /dev/null
@@ -1,5 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<?eclipse-pydev version="1.0"?><pydev_project>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
-<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
-</pydev_project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/fb4115e2/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
index 5ae3739..e0e4259 100644
--- a/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
+++ b/ambari-server/src/test/python/stacks/2.6/ZEPPELIN/test_zeppelin_070.py
@@ -305,24 +305,23 @@ class TestZeppelin070(RMFTestCase):
                               security_enabled=False,
                               )
 
-    self.assertResourceCalled('HdfsResource',
-                              '/etc/zeppelin/conf/interpreter.json',
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
-                              default_fs=u'hdfs://c6401.ambari.apache.org:8020',
-                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-                              hdfs_site={u'a': u'b'},
-                              kinit_path_local='/usr/bin/kinit',
-                              user="zeppelin",
-                              owner="zeppelin",
-                              group="zeppelin",
-                              type='file',
-                              action=['download_on_execute'],
-                              source="/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json",
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
-                              keytab=UnknownConfigurationMock(),
-                              principal_name=UnknownConfigurationMock(),
-                              security_enabled=False,
-                              )
+    self.assertResourceCalled('HdfsResource', '/etc/zeppelin/conf/interpreter.json',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        source = '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+        action = ['download_on_execute'],
+    )
 
     self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
                               content=interpreter_json_generated.template_after_base,
@@ -330,44 +329,42 @@ class TestZeppelin070(RMFTestCase):
                               group='zeppelin',
                               )
 
-    self.assertResourceCalled('HdfsResource',
-                              '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
-                              default_fs=u'hdfs://c6401.ambari.apache.org:8020',
-                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-                              hdfs_site={u'a': u'b'},
-                              kinit_path_local='/usr/bin/kinit',
-                              user="zeppelin",
-                              owner="zeppelin",
-                              group="zeppelin",
-                              type='file',
-                              action=['create_on_execute'],
-                              source="/etc/zeppelin/conf/interpreter.json",
-                              replace_existing_files=True,
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
-                              keytab=UnknownConfigurationMock(),
-                              principal_name=UnknownConfigurationMock(),
-                              security_enabled=False,
-                              )
+    self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        source = '/etc/zeppelin/conf/interpreter.json',
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        replace_existing_files = True,
+        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
 
-    self.assertResourceCalled('HdfsResource',
-                              '/etc/zeppelin/conf/interpreter.json',
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
-                              default_fs=u'hdfs://c6401.ambari.apache.org:8020',
-                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-                              hdfs_site={u'a': u'b'},
-                              kinit_path_local='/usr/bin/kinit',
-                              user="zeppelin",
-                              owner="zeppelin",
-                              group="zeppelin",
-                              type='file',
-                              action=['download_on_execute'],
-                              source="/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json",
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
-                              keytab=UnknownConfigurationMock(),
-                              principal_name=UnknownConfigurationMock(),
-                              security_enabled=False,
-                              )
+    self.assertResourceCalled('HdfsResource', '/etc/zeppelin/conf/interpreter.json',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        source = '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+        action = ['download_on_execute'],
+    )
 
     self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
                               content=interpreter_json_generated.template_after_without_spark_and_livy,
@@ -375,44 +372,42 @@ class TestZeppelin070(RMFTestCase):
                               group='zeppelin',
                               )
 
-    self.assertResourceCalled('HdfsResource',
-                              '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
-                              default_fs=u'hdfs://c6401.ambari.apache.org:8020',
-                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-                              hdfs_site={u'a': u'b'},
-                              kinit_path_local='/usr/bin/kinit',
-                              user="zeppelin",
-                              owner="zeppelin",
-                              group="zeppelin",
-                              type='file',
-                              action=['create_on_execute'],
-                              source="/etc/zeppelin/conf/interpreter.json",
-                              replace_existing_files=True,
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
-                              keytab=UnknownConfigurationMock(),
-                              principal_name=UnknownConfigurationMock(),
-                              security_enabled=False,
-                              )
+    self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        source = '/etc/zeppelin/conf/interpreter.json',
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        replace_existing_files = True,
+        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
 
-    self.assertResourceCalled('HdfsResource',
-                              '/etc/zeppelin/conf/interpreter.json',
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
-                              default_fs=u'hdfs://c6401.ambari.apache.org:8020',
-                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-                              hdfs_site={u'a': u'b'},
-                              kinit_path_local='/usr/bin/kinit',
-                              user="zeppelin",
-                              owner="zeppelin",
-                              group="zeppelin",
-                              type='file',
-                              action=['download_on_execute'],
-                              source="/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json",
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
-                              keytab=UnknownConfigurationMock(),
-                              principal_name=UnknownConfigurationMock(),
-                              security_enabled=False,
-                              )
+    self.assertResourceCalled('HdfsResource', '/etc/zeppelin/conf/interpreter.json',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        source = '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+        action = ['download_on_execute'],
+    )
 
     self.assertResourceCalled('File', '/etc/zeppelin/conf/interpreter.json',
                               content=interpreter_json_generated.template_after_kerberos,
@@ -420,25 +415,24 @@ class TestZeppelin070(RMFTestCase):
                               group='zeppelin',
                               )
 
-    self.assertResourceCalled('HdfsResource',
-                              '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
-                              hadoop_bin_dir='/usr/hdp/current/hadoop-client/bin',
-                              default_fs=u'hdfs://c6401.ambari.apache.org:8020',
-                              hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-                              hdfs_site={u'a': u'b'},
-                              kinit_path_local='/usr/bin/kinit',
-                              user="zeppelin",
-                              owner="zeppelin",
-                              group="zeppelin",
-                              type='file',
-                              action=['create_on_execute'],
-                              source="/etc/zeppelin/conf/interpreter.json",
-                              replace_existing_files=True,
-                              hadoop_conf_dir='/usr/hdp/current/hadoop-client/conf',
-                              keytab=UnknownConfigurationMock(),
-                              principal_name=UnknownConfigurationMock(),
-                              security_enabled=False,
-                              )
+    self.assertResourceCalled('HdfsResource', '/user/zeppelin/hdfs:///user/zeppelin/conf/interpreter.json',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        source = '/etc/zeppelin/conf/interpreter.json',
+        default_fs = 'hdfs://c6401.ambari.apache.org:8020',
+        replace_existing_files = True,
+        hdfs_resource_ignore_file = '/var/lib/ambari-agent/data/.hdfs_resource_ignore',
+        hdfs_site = {u'a': u'b'},
+        kinit_path_local = '/usr/bin/kinit',
+        principal_name = UnknownConfigurationMock(),
+        user = 'hdfs',
+        owner = 'zeppelin',
+        group = 'zeppelin',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
 
     self.assertResourceCalled('Execute',
                               '/usr/hdp/current/zeppelin-server/bin/zeppelin-daemon.sh restart >> /var/log/zeppelin/zeppelin-setup.log',


[38/50] [abbrv] ambari git commit: AMBARI-22083 - Wrong Hadoop Home Directory Is Being Picked Up on MAINT/PATCH Upgraded Clusters (jonathanhurley)

Posted by rl...@apache.org.
AMBARI-22083 - Wrong Hadoop Home Directory Is Being Picked Up on MAINT/PATCH Upgraded Clusters (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/31344fff
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/31344fff
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/31344fff

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 31344fff2ddd94923f053d6c9c6b945c299bd5d5
Parents: 18fd04f
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Sep 27 11:52:11 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Sep 29 15:40:38 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/component_version.py    |  26 +--
 .../libraries/functions/conf_select.py          |  79 ++--------
 .../libraries/functions/stack_select.py         |  69 ++++----
 .../ambari/server/agent/StatusCommand.java      |  16 ++
 .../package/scripts/hive_server_upgrade.py      |   5 -
 .../0.12.0.2.0/package/scripts/params_linux.py  |   5 +-
 .../0.12.0.2.0/package/scripts/status_params.py |   8 +-
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |   2 +-
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |  11 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  53 +++----
 .../before-ANY/scripts/shared_initialization.py |   8 -
 .../2.0.6/hooks/before-START/scripts/params.py  |  17 +-
 .../services/ECS/package/scripts/params.py      |   2 +-
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |   1 -
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |   6 +-
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |   7 +
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |  38 ++---
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |   5 +-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |  42 +++--
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 158 ++++++++++---------
 .../2.0.6/HIVE/test_hive_service_check.py       |   4 +-
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |  41 +++--
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |   9 ++
 .../2.0.6/OOZIE/test_oozie_service_check.py     |   5 +-
 .../stacks/2.0.6/YARN/test_historyserver.py     |   5 +-
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   1 +
 .../hooks/after-INSTALL/test_after_install.py   |  12 +-
 .../2.0.6/hooks/before-ANY/test_before_any.py   |   9 --
 .../stacks/2.1/FALCON/test_falcon_server.py     |  26 +--
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  54 ++++---
 .../stacks/2.2/PIG/test_pig_service_check.py    |  13 ++
 .../stacks/2.2/SPARK/test_job_history_server.py |  18 +--
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |   2 +-
 .../2.3/MAHOUT/test_mahout_service_check.py     |  28 ++--
 .../2.3/SPARK/test_spark_thrift_server.py       |   8 +-
 .../stacks/2.5/RANGER_KMS/test_kms_server.py    |  24 +--
 .../python/stacks/2.5/SPARK/test_spark_livy.py  |  16 +-
 .../stacks/2.5/ZEPPELIN/test_zeppelin_060.py    |  20 +--
 .../stacks/2.5/configs/ranger-kms-secured.json  |   6 +-
 .../test/python/stacks/2.6/DRUID/test_druid.py  |  20 +--
 .../stacks/2.6/SPARK2/test_spark_livy2.py       |  16 +-
 .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py    |  44 +++---
 42 files changed, 472 insertions(+), 467 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py b/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
index a1fd6b2..169b339 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/component_version.py
@@ -20,7 +20,7 @@ limitations under the License.
 
 from resource_management.libraries.script.script import Script
 
-def get_component_repository_version(service_name, component_name = None):
+def get_component_repository_version(service_name = None, component_name = None):
   """
   Gets the version associated with the specified component from the structure in the command.
   Every command should contain a mapping of service/component to the desired repository it's set
@@ -29,11 +29,16 @@ def get_component_repository_version(service_name, component_name = None):
   :service_name: the name of the service
   :component_name: the name of the component
   """
-  versions = _get_component_repositories()
+  config = Script.get_config()
+
+  versions = _get_component_repositories(config)
   if versions is None:
     return None
 
-  if service_name not in versions:
+  if service_name is None:
+    service_name = config['serviceName'] if config is not None and 'serviceName' in config else None
+
+  if service_name is None or service_name not in versions:
     return None
 
   component_versions = versions[service_name]
@@ -41,22 +46,23 @@ def get_component_repository_version(service_name, component_name = None):
     return None
 
   if component_name is None:
-    for component in component_versions:
-      return component_versions[component]
+    component_name = config["role"] if config is not None and "role" in config else None
 
-  if not component_name in component_versions:
-    return None
+  # return a direct match of component name
+  if component_name is not None and component_name in component_versions:
+    return component_versions[component_name]
 
-  return component_versions[component_name]
+  # fall back to the first one for the service
+  return component_versions.values()[0]
 
 
-def _get_component_repositories():
+def _get_component_repositories(config):
   """
   Gets an initialized dictionary from the value in componentVersionMap. This structure is
   sent on every command by Ambari and should contain each service & component's desired repository.
+  :config:  the configuration dictionary
   :return:
   """
-  config = Script.get_config()
   if "componentVersionMap" not in config or config["componentVersionMap"] is "":
     return None
 

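The command structure consulted by get_component_repository_version() looks
roughly like the following; this is an illustration of the fallback chain
(explicit arguments, then serviceName/role from the command, then the first
entry registered for the service) against a made-up componentVersionMap:

config = {
    "serviceName": "HIVE",
    "role": "HIVE_SERVER",
    "componentVersionMap": {
        "HIVE": {"HIVE_SERVER": "2.5.0.0-1234", "HIVE_CLIENT": "2.5.0.0-1234"},
    },
}

versions = config["componentVersionMap"]
component_versions = versions.get(config.get("serviceName"), {})
# direct match on the role first, otherwise the first version for the service
version = component_versions.get(config.get("role")) or list(component_versions.values())[0]
assert version == "2.5.0.0-1234"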
http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index ffcaad5..86821bf 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -34,9 +34,11 @@ from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Directory
 from resource_management.core.resources.system import Execute
 from resource_management.core.resources.system import Link
+from resource_management.libraries.functions import component_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import stack_tools
 from resource_management.core.exceptions import Fail
+from resource_management.core import sudo
 from resource_management.core.shell import as_sudo
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions import StackFeature
@@ -215,79 +217,28 @@ def select(stack_name, package, version, try_create=True, ignore_errors=False):
 
 
 
-def get_hadoop_conf_dir(force_latest_on_upgrade=False):
+def get_hadoop_conf_dir():
   """
-  Gets the shared hadoop conf directory using:
-  1.  Start with /etc/hadoop/conf
-  2.  When the stack is greater than HDP-2.2, use <stack-root>/current/hadoop-client/conf
-  3.  Only when doing a RU and HDP-2.3 or higher, use the value as computed
-      by <conf-selector-tool>.  This is in the form <stack-root>/VERSION/hadoop/conf to make sure
-      the configs are written in the correct place. However, if the component itself has
-      not yet been upgraded, it should use the hadoop configs from the prior version.
-      This will perform an <stack-selector-tool> status to determine which version to use.
-  :param force_latest_on_upgrade:  if True, then force the returned path to always
-  be that of the upgrade target version, even if <stack-selector-tool> has not been called. This
-  is primarily used by hooks like before-ANY to ensure that hadoop environment
-  configurations are written to the correct location since they are written out
-  before the <stack-selector-tool>/<conf-selector-tool> would have been called.
+  Return the hadoop shared conf directory which should be used for the command's component. The
+  directory including the component's version is tried first, but if that doesn't exist,
+  this will fall back to using "current".
   """
-  hadoop_conf_dir = "/etc/hadoop/conf"
-  stack_name = None
   stack_root = Script.get_stack_root()
   stack_version = Script.get_stack_version()
-  version = None
 
-  if not Script.in_stack_upgrade():
-    # During normal operation, the HDP stack must be 2.3 or higher
-    if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version):
-      hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
-
-    if stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version):
-      hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
-      stack_name = default("/hostLevelParams/stack_name", None)
+  hadoop_conf_dir = os.path.join(os.path.sep, "etc", "hadoop", "conf")
+  if check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version):
+    # read the desired version from the component map and use that for building the hadoop home
+    version = component_version.get_component_repository_version()
+    if version is None:
       version = default("/commandParams/version", None)
 
-      if not os.path.islink(hadoop_conf_dir) and stack_name and version:
-        version = str(version)
-  else:
-    # The "stack_version" is the desired stack, e.g., 2.2 or 2.3
-    # In an RU, it is always the desired stack, and doesn't change even during the Downgrade!
-    # In an RU Downgrade from HDP 2.3 to 2.2, the first thing we do is
-    # rm /etc/[component]/conf and then mv /etc/[component]/conf.backup /etc/[component]/conf
-    if stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, stack_version):
+    hadoop_conf_dir = os.path.join(stack_root, str(version), "hadoop", "conf")
+    if version is None or sudo.path_isdir(hadoop_conf_dir) is False:
       hadoop_conf_dir = os.path.join(stack_root, "current", "hadoop-client", "conf")
 
-      # This contains the "version", including the build number, that is actually used during a stack upgrade and
-      # is the version upgrading/downgrading to.
-      stack_info = stack_select._get_upgrade_stack()
-
-      if stack_info is None:
-        raise Fail("Unable to retrieve the upgrade/downgrade stack information from the request")
-
-      stack_name = stack_info[0]
-      version = stack_info[1]
-
-      Logger.info(
-        "An upgrade/downgrade for {0}-{1} is in progress, determining which hadoop conf dir to use.".format(
-          stack_name, version))
-
-      # This is the version either upgrading or downgrading to.
-      if version and check_stack_feature(StackFeature.CONFIG_VERSIONING, version):
-        # Determine if <stack-selector-tool> has been run and if not, then use the current
-        # hdp version until this component is upgraded.
-        if not force_latest_on_upgrade:
-          current_stack_version = stack_select.get_role_component_current_stack_version()
-          if current_stack_version is not None and version != current_stack_version:
-            version = current_stack_version
-            stack_selector_name = stack_tools.get_stack_tool_name(stack_tools.STACK_SELECTOR_NAME)
-            Logger.info("{0} has not yet been called to update the symlink for this component, "
-                        "keep using version {1}".format(stack_selector_name, current_stack_version))
-
-        # Only change the hadoop_conf_dir path, don't <conf-selector-tool> this older version
-        hadoop_conf_dir = os.path.join(stack_root, version, "hadoop", "conf")
-        Logger.info("Hadoop conf dir: {0}".format(hadoop_conf_dir))
-
-  Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
+    Logger.info("Using hadoop conf dir: {0}".format(hadoop_conf_dir))
+
   return hadoop_conf_dir
 
 

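Once a stack supports config versioning, the conf directory is resolved the
same way: build the versioned path, probe it (via sudo, since the agent process
may not own it), and fall back to "current". A rough equivalent, with
os.path.isdir standing in for sudo.path_isdir:

import os

def pick_hadoop_conf_dir(stack_root, version, supports_config_versioning,
                         isdir=os.path.isdir):
    # Sketch of the new get_hadoop_conf_dir(): older stacks keep the
    # static /etc/hadoop/conf, versioned stacks probe then fall back.
    if not supports_config_versioning:
        return os.path.join(os.path.sep, "etc", "hadoop", "conf")
    versioned = os.path.join(stack_root, str(version), "hadoop", "conf")
    if version is not None and isdir(versioned):
        return versioned
    return os.path.join(stack_root, "current", "hadoop-client", "conf")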
http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
index e506974..9b7d0eb 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
@@ -28,16 +28,18 @@ import ambari_simplejson as json
 from resource_management.core.logger import Logger
 from resource_management.core.exceptions import Fail
 from resource_management.core.resources.system import Execute
+from resource_management.libraries.functions import component_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.get_stack_version import get_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import stack_tools
 from resource_management.core import shell
+from resource_management.core import sudo
 from resource_management.core.shell import call
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.version_select_util import get_versions_from_stack_root
-from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions import stack_features
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions import upgrade_summary
 
@@ -356,17 +358,13 @@ def get_role_component_current_stack_version():
   return current_stack_version
 
 
-def get_hadoop_dir(target, force_latest_on_upgrade=False):
+def get_hadoop_dir(target):
   """
-  Return the hadoop shared directory in the following override order
-  1. Use default for 2.1 and lower
-  2. If 2.2 and higher, use <stack-root>/current/hadoop-client/{target}
-  3. If 2.2 and higher AND for an upgrade, use <stack-root>/<version>/hadoop/{target}.
-  However, if the upgrade has not yet invoked <stack-selector-tool>, return the current
-  version of the component.
+  Return the hadoop shared directory which should be used for the command's component. The
+  directory including the component's version is tried first, but if that doesn't exist,
+  this will fall back to using "current".
+
   :target: the target directory
-  :force_latest_on_upgrade: if True, then this will return the "current" directory
-  without the stack version built into the path, such as <stack-root>/current/hadoop-client
   """
   stack_root = Script.get_stack_root()
   stack_version = Script.get_stack_version()
@@ -377,35 +375,26 @@ def get_hadoop_dir(target, force_latest_on_upgrade=False):
   hadoop_dir = HADOOP_DIR_DEFAULTS[target]
 
   formatted_stack_version = format_stack_version(stack_version)
-  if formatted_stack_version and  check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_stack_version):
+
+  if stack_features.check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_stack_version):
+    # read the desired version from the component map and use that for building the hadoop home
+    version = component_version.get_component_repository_version()
+    if version is None:
+      version = default("/commandParams/version", None)
+
     # home uses a different template
     if target == "home":
-      hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client")
+      hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, version, "hadoop")
+      if version is None or sudo.path_isdir(hadoop_dir) is False:
+        hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client")
     else:
-      hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client", target)
-
-    # if we are not forcing "current" for HDP 2.2, then attempt to determine
-    # if the exact version needs to be returned in the directory
-    if not force_latest_on_upgrade:
-      stack_info = _get_upgrade_stack()
-
-      if stack_info is not None:
-        stack_version = stack_info[1]
-
-        # determine if <stack-selector-tool> has been run and if not, then use the current
-        # hdp version until this component is upgraded
-        current_stack_version = get_role_component_current_stack_version()
-        if current_stack_version is not None and stack_version != current_stack_version:
-          stack_version = current_stack_version
-
-        if target == "home":
-          # home uses a different template
-          hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop")
-        else:
-          hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop", target)
+      hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, version, "hadoop", target)
+      if version is None or sudo.path_isdir(hadoop_dir) is False:
+        hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, "current", "hadoop-client", target)
 
   return hadoop_dir
 
+
 def get_hadoop_dir_for_stack_version(target, stack_version):
   """
   Return the hadoop shared directory for the provided stack version. This is necessary
@@ -418,15 +407,11 @@ def get_hadoop_dir_for_stack_version(target, stack_version):
   if not target in HADOOP_DIR_DEFAULTS:
     raise Fail("Target {0} not defined".format(target))
 
-  hadoop_dir = HADOOP_DIR_DEFAULTS[target]
-
-  formatted_stack_version = format_stack_version(stack_version)
-  if formatted_stack_version and  check_stack_feature(StackFeature.ROLLING_UPGRADE, formatted_stack_version):
-    # home uses a different template
-    if target == "home":
-      hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop")
-    else:
-      hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop", target)
+  # home uses a different template
+  if target == "home":
+    hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop")
+  else:
+    hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_root, stack_version, "hadoop", target)
 
   return hadoop_dir
 

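One nuance in get_hadoop_dir(): "home" uses a different path template than the
other targets (bin, sbin, libexec, lib, ...). A sketch of how both templates
degrade to "current" when the versioned directory is missing, with the template
shapes inferred from the paths asserted in the tests below:

import os

HOME_TEMPLATE = "{0}/{1}/{2}"        # e.g. /usr/hdp/<version>/hadoop
TARGET_TEMPLATE = "{0}/{1}/{2}/{3}"  # e.g. /usr/hdp/<version>/hadoop/libexec

def hadoop_dir(stack_root, version, target, isdir=os.path.isdir):
    if target == "home":
        candidate = HOME_TEMPLATE.format(stack_root, version, "hadoop")
        fallback = HOME_TEMPLATE.format(stack_root, "current", "hadoop-client")
    else:
        candidate = TARGET_TEMPLATE.format(stack_root, version, "hadoop", target)
        fallback = TARGET_TEMPLATE.format(stack_root, "current", "hadoop-client", target)
    return candidate if version is not None and isdir(candidate) else fallback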
http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
index e8c0b05..9f8a095 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/StatusCommand.java
@@ -39,6 +39,9 @@ public class StatusCommand extends AgentCommand {
   @SerializedName("serviceName")
   private String serviceName;
 
+  @SerializedName("role")
+  private String role;
+
   @SerializedName("componentName")
   private String componentName;
 
@@ -121,8 +124,17 @@ public class StatusCommand extends AgentCommand {
     return componentName;
   }
 
+  /**
+   * Sets both the {@code componentName} and the {@code role}. Status commands
+   * use the {@code componentName}, while execution commands use the
+   * {@code role}. It's simpler for the Python to just worry about {@code role},
+   * so this ensures that both are set.
+   *
+   * @param componentName
+   */
   public void setComponentName(String componentName) {
     this.componentName = componentName;
+    role = componentName;
   }
 
   public Map<String, Map<String, String>> getConfigurations() {
@@ -165,6 +177,10 @@ public class StatusCommand extends AgentCommand {
     return hostname;
   }
 
+  public String getRole() {
+    return role;
+  }
+
   public enum StatusCommandPayload {
     // The minimal payload for status, agent adds necessary details
     MINIMAL,

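With the server setting "role" alongside "componentName", a status command now
carries the same key that execution commands do, so the Python scripts can
resolve versions uniformly for both command types. A hypothetical
status-command payload after this change:

status_command = {
    "serviceName": "HDFS",
    "componentName": "DATANODE",
    "role": "DATANODE",  # mirrored automatically by setComponentName()
    "componentVersionMap": {"HDFS": {"DATANODE": "2.6.3.0-235"}},
}
version = status_command["componentVersionMap"]["HDFS"].get(status_command["role"])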
http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
index 12c9e1c..1cb95ff 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_upgrade.py
@@ -58,11 +58,6 @@ def deregister():
   if current_hiveserver_version is None:
     raise Fail('Unable to determine the current HiveServer2 version to deregister.')
 
-  # fallback when upgrading because <stack-root>/current/hive-server2/conf/conf.server may not exist
-  hive_server_conf_dir = params.hive_server_conf_dir
-  if not os.path.exists(hive_server_conf_dir):
-    hive_server_conf_dir = "/etc/hive/conf.server"
-
   # deregister
   hive_execute_path = params.execute_path
   # If upgrading, the upgrade-target hive binary should be used to call the --deregister command.

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index ceeac79..1d68ef4 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -17,7 +17,6 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-
 import status_params
 import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
 import os
@@ -36,6 +35,7 @@ from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
@@ -109,7 +109,8 @@ stack_supports_hive_interactive_ga = check_stack_feature(StackFeature.HIVE_INTER
 component_directory = status_params.component_directory
 component_directory_interactive = status_params.component_directory_interactive
 
-hadoop_home = format('{stack_root}/current/hadoop-client')
+hadoop_home = stack_select.get_hadoop_dir("home")
+
 hive_bin = format('{stack_root}/current/{component_directory}/bin')
 hive_schematool_ver_bin = format('{stack_root}/{version}/hive/bin')
 hive_schematool_bin = format('{stack_root}/current/{component_directory}/bin')

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
index 98a50a8..490d090 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
@@ -24,7 +24,7 @@ from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions import stack_features
 from resource_management.libraries.functions.version import format_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -97,17 +97,17 @@ else:
   hive_conf_dir = format("{stack_root}/current/{component_directory}/conf")
   hive_client_conf_dir = format("{stack_root}/current/{component_directory}/conf")
 
-  if check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version_formatted_major):
+  if stack_features.check_stack_feature(StackFeature.CONFIG_VERSIONING, stack_version_formatted_major):
     hive_server_conf_dir = format("{stack_root}/current/{component_directory}/conf/conf.server")
     hive_conf_dir = hive_server_conf_dir
 
-  if check_stack_feature(StackFeature.HIVE_WEBHCAT_SPECIFIC_CONFIGS, stack_version_formatted_major):
+  if stack_features.check_stack_feature(StackFeature.HIVE_WEBHCAT_SPECIFIC_CONFIGS, stack_version_formatted_major):
     # this is NOT a typo. Configs for hcatalog/webhcat point to a
     # specific directory which is NOT called 'conf'
     webhcat_conf_dir = format("{stack_root}/current/hive-webhcat/etc/webhcat")
 
   # if stack version supports hive serve interactive
-  if check_stack_feature(StackFeature.HIVE_SERVER_INTERACTIVE, stack_version_formatted_major):
+  if stack_features.check_stack_feature(StackFeature.HIVE_SERVER_INTERACTIVE, stack_version_formatted_major):
     hive_server_interactive_conf_dir = format("{stack_root}/current/{component_directory_interactive}/conf/conf.server")
 
   hive_config_dir = hive_client_conf_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index 66731f8..924fe73 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -86,7 +86,7 @@ def webhcat():
             )
 
   # if we're in an upgrade of a secure cluster, make sure hive-site and yarn-site are created
-  if params.stack_version_formatted_major  and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version_formatted_major) and \
+  if check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version_formatted_major) and \
        params.version and params.stack_root:
     XmlConfig("hive-site.xml",
       conf_dir = format("{stack_root}/{version}/hive/conf"),

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index b517eba..bf9d79b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -57,16 +57,9 @@ logsearch_config_file_path = agent_cache_dir + "/" + service_package_folder + "/
 logsearch_config_file_exists = os.path.isfile(logsearch_config_file_path)
 
 # default hadoop params
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 
-# HDP 2.2+ params
-if Script.is_stack_greater_or_equal("2.2"):
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-
-  # not supported in HDP 2.2+
-  hadoop_conf_empty_dir = None
+mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
 
 versioned_stack_root = '/usr/hdp/current'
 
@@ -107,7 +100,7 @@ namenode_host = default("/clusterHostInfo/namenode_host", [])
 has_namenode = not len(namenode_host) == 0
 
 if has_namenode or dfs_type == 'HCFS':
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 link_configs_lock_file = get_config_lock_file()
 stack_select_lock_file = os.path.join(tmp_dir, "stack_select_lock_file")

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index e62bf03..20992e2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -103,49 +103,38 @@ def is_secure_port(port):
   else:
     return False
 
-# hadoop default params
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-
 # upgrades would cause these directories to have a version instead of "current"
 # which would cause a lot of problems when writing out hadoop-env.sh; instead
 # force the use of "current" in the hook
 hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
-hadoop_home = stack_select.get_hadoop_dir("home", force_latest_on_upgrade=True)
-hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec", force_latest_on_upgrade=True)
+hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 
-hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
-hadoop_secure_dn_user = hdfs_user
 hadoop_dir = "/etc/hadoop"
-versioned_stack_root = '/usr/hdp/current'
 hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
 datanode_max_locked_memory = config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory']
 is_datanode_max_locked_memory_set = not is_empty(config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory'])
 
-# HDP 2.2+ params
-if Script.is_stack_greater_or_equal("2.2"):
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-
-  # not supported in HDP 2.2+
-  hadoop_conf_empty_dir = None
-
-  if not security_enabled:
-    hadoop_secure_dn_user = '""'
+mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+
+if not security_enabled:
+  hadoop_secure_dn_user = '""'
+else:
+  dfs_dn_port = get_port(dfs_dn_addr)
+  dfs_dn_http_port = get_port(dfs_dn_http_addr)
+  dfs_dn_https_port = get_port(dfs_dn_https_addr)
+  # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports
+  if dfs_http_policy == "HTTPS_ONLY":
+    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_https_port)
+  elif dfs_http_policy == "HTTP_AND_HTTPS":
+    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port) or is_secure_port(dfs_dn_https_port)
+  else:   # params.dfs_http_policy == "HTTP_ONLY" or not defined:
+    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port)
+  if secure_dn_ports_are_in_use:
+    hadoop_secure_dn_user = hdfs_user
   else:
-    dfs_dn_port = get_port(dfs_dn_addr)
-    dfs_dn_http_port = get_port(dfs_dn_http_addr)
-    dfs_dn_https_port = get_port(dfs_dn_https_addr)
-    # We try to avoid inability to start datanode as a plain user due to usage of root-owned ports
-    if dfs_http_policy == "HTTPS_ONLY":
-      secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_https_port)
-    elif dfs_http_policy == "HTTP_AND_HTTPS":
-      secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port) or is_secure_port(dfs_dn_https_port)
-    else:   # params.dfs_http_policy == "HTTP_ONLY" or not defined:
-      secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port)
-    if secure_dn_ports_are_in_use:
-      hadoop_secure_dn_user = hdfs_user
-    else:
-      hadoop_secure_dn_user = '""'
+    hadoop_secure_dn_user = '""'
 
 #hadoop params
 hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
@@ -221,7 +210,7 @@ if dfs_ha_namenode_ids:
     dfs_ha_enabled = True
 
 if has_namenode or dfs_type == 'HCFS':
-    hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+    hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
     hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
 
 hbase_tmp_dir = "/tmp/hbase-hbase"

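The relocated block above decides whether a Kerberized datanode has to start
its data transfer endpoints as root: if any configured endpoint sits on a
privileged port, hadoop_secure_dn_user is set to the hdfs user, otherwise to
'""'. A small sketch of the decision, assuming is_secure_port() simply tests
for ports below 1024:

def is_secure_port(port):
    # Privileged ports (< 1024) require root to bind.
    return port is not None and port < 1024

def pick_secure_dn_user(dfs_http_policy, dn_port, http_port, https_port, hdfs_user):
    if dfs_http_policy == "HTTPS_ONLY":
        in_use = is_secure_port(dn_port) or is_secure_port(https_port)
    elif dfs_http_policy == "HTTP_AND_HTTPS":
        in_use = is_secure_port(dn_port) or is_secure_port(http_port) or is_secure_port(https_port)
    else:  # HTTP_ONLY or not defined
        in_use = is_secure_port(dn_port) or is_secure_port(http_port)
    return hdfs_user if in_use else '""'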
http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
index 11593fe..27679e0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/shared_initialization.py
@@ -189,14 +189,6 @@ def setup_hadoop_env():
     # create /etc/hadoop
     Directory(params.hadoop_dir, mode=0755)
 
-    # HDP < 2.2 used a conf -> conf.empty symlink for /etc/hadoop/
-    if Script.is_stack_less_than("2.2"):
-      Directory(params.hadoop_conf_empty_dir, create_parents = True, owner="root",
-        group=params.user_group )
-
-      Link(params.hadoop_conf_dir, to=params.hadoop_conf_empty_dir,
-         not_if=format("ls {hadoop_conf_dir}"))
-
     # write out hadoop-env.sh, but only if the directory exists
     if os.path.exists(params.hadoop_conf_dir):
       File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'), owner=tc_owner,

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index c8880ae..6c26e01 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -68,20 +68,13 @@ hadoop_metrics2_properties_content = None
 if 'hadoop-metrics2.properties' in config['configurations']:
   hadoop_metrics2_properties_content = config['configurations']['hadoop-metrics2.properties']['content']
 
-# hadoop default params
-mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 hadoop_bin = stack_select.get_hadoop_dir("sbin")
-hadoop_home = '/usr'
-create_lib_snappy_symlinks = True
-
-# HDP 2.2+ params
-if Script.is_stack_greater_or_equal("2.2"):
-  mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-  hadoop_home = stack_select.get_hadoop_dir("home")
-  create_lib_snappy_symlinks = False
+
+mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+hadoop_home = stack_select.get_hadoop_dir("home")
+create_lib_snappy_symlinks = False
   
 current_service = config['serviceName']
 
@@ -192,7 +185,7 @@ if has_zk_host:
 
 if has_namenode or dfs_type == 'HCFS':
   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
   task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
 
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
index c304a93..652c23e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
@@ -49,7 +49,7 @@ smoke_hdfs_user_mode = 0770
 java64_home = config['hostLevelParams']['java_home']
 java_version = int(config['hostLevelParams']['java_version'])
 
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hadoop_dir = "/etc/hadoop"

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
index 135b239..1cde55a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
@@ -223,7 +223,6 @@ class TestHBaseClient(RMFTestCase):
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hbase-client', '2.2.1.0-2067'), sudo=True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'phoenix-client', '2.2.1.0-2067'), sudo=True)
     self.assertResourceCalled("Execute", ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', '2.2.1.0-2067'), sudo=True)
-    self.assertEquals(1, mocks_dict['call'].call_count)
 
 
   @patch("resource_management.core.shell.call")

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index b7d6abf..2224d31 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -678,7 +678,7 @@ class TestHBaseMaster(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         owner = 'hbase',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
     )
@@ -694,7 +694,7 @@ class TestHBaseMaster(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         owner = 'hbase',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
         type = 'directory',
         action = ['create_on_execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
         mode = 0711,
@@ -711,7 +711,7 @@ class TestHBaseMaster(RMFTestCase):
         user = 'hdfs',
         dfs_type = '',
         action = ['execute'], hdfs_resource_ignore_file='/var/lib/ambari-agent/data/.hdfs_resource_ignore',
-        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        hadoop_conf_dir = '/etc/hadoop/conf',
     )
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /usr/hdp/current/hbase-master/conf start master',

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
index 972aa61..f27a3b9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -34,6 +34,7 @@ class TestPhoenixQueryServer(RMFTestCase):
 
   CONFIG_OVERRIDES = {"serviceName":"HBASE", "role":"PHOENIX_QUERY_SERVER"}
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_configure_default(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -48,6 +49,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assert_configure_default()
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_start_default(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -67,6 +69,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_stop_default(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -90,6 +93,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_configure_secured(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -104,6 +108,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     self.assert_configure_secured()
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_start_secured(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -123,6 +128,7 @@ class TestPhoenixQueryServer(RMFTestCase):
     )
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_stop_secured(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -422,6 +428,7 @@ class TestPhoenixQueryServer(RMFTestCase):
       content = InlineTemplate('log4jproperties\nline2')
     )
 
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_upgrade_restart(self):
     config_file = self.get_src_folder()+"/test/python/stacks/2.3/configs/hbase_default.json"
     with open(config_file, "r") as f:

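Because the directory helpers now probe the filesystem through sudo.path_isdir,
the unit tests must patch that call or the versioned paths would never "exist"
on the build machine. The decorator pattern repeated throughout these tests
boils down to:

from mock.mock import MagicMock, patch

# Force the versioned-directory probe to succeed so the test exercises the
# /usr/hdp/<version>/... code path instead of the "current" fallback.
@patch("resource_management.core.sudo.path_isdir", new=MagicMock(return_value=True))
def test_upgrade_restart(self):
    pass  # test body elided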
http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 24b0347..b1a4154 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -25,9 +25,11 @@ from resource_management.core import shell
 import itertools
 from resource_management.core.exceptions import Fail
 import resource_management.libraries.functions.mounted_dirs_helper
+from resource_management.libraries.functions import conf_select
 
 @patch.object(resource_management.libraries.functions, 'check_process_status', new = MagicMock())
 @patch.object(Script, 'format_package_name', new = MagicMock())
+@patch.object(conf_select, "get_hadoop_conf_dir", new=MagicMock(return_value="/usr/hdp/current/hadoop-client/conf"))
 class TestDatanode(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -73,7 +75,7 @@ class TestDatanode(RMFTestCase):
         action = ['delete'],
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode'",
+    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
@@ -96,7 +98,7 @@ class TestDatanode(RMFTestCase):
                        checked_call_mocks = side_effect,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode'",
+    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode'",
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid")
 
@@ -143,7 +145,7 @@ class TestDatanode(RMFTestCase):
         action = ['delete'],
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start datanode',
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode',
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
@@ -163,7 +165,7 @@ class TestDatanode(RMFTestCase):
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assert_configure_secured("2.2", snappy_enabled=False)
+    self.assert_configure_secured("2.3", snappy_enabled=False)
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -183,8 +185,8 @@ class TestDatanode(RMFTestCase):
         action = ['delete'],
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode',
-        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/2.1.0.0-1234/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode',
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.1.0.0-1234/hadoop/libexec'},
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertNoMoreResources()
@@ -206,7 +208,7 @@ class TestDatanode(RMFTestCase):
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assert_configure_secured("2.2", snappy_enabled=False)
+    self.assert_configure_secured("2.3", snappy_enabled=False)
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -226,8 +228,8 @@ class TestDatanode(RMFTestCase):
         action = ['delete'],
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode'",
-        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
+    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/hdp/2.1.0.0-1234/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf start datanode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.1.0.0-1234/hadoop/libexec'},
         not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid",
     )
     self.assertNoMoreResources()
@@ -249,7 +251,7 @@ class TestDatanode(RMFTestCase):
                        checked_call_mocks = side_effect,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /etc/hadoop/conf stop datanode',
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/lib/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode',
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
         only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid")
 
@@ -279,8 +281,8 @@ class TestDatanode(RMFTestCase):
                        checked_call_mocks = side_effect,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode',
-        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
+    self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E /usr/hdp/2.1.0.0-1234/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode',
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.1.0.0-1234/hadoop/libexec'},
         only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid")
 
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid', action = ['delete'])
@@ -312,8 +314,8 @@ class TestDatanode(RMFTestCase):
                        checked_call_mocks = side_effect,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
-    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode'",
-        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/current/hadoop-client/libexec'},
+    self.assertResourceCalled('Execute', "ambari-sudo.sh su hdfs -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]ulimit -c unlimited ;  /usr/hdp/2.1.0.0-1234/hadoop/sbin/hadoop-daemon.sh --config /usr/hdp/current/hadoop-client/conf stop datanode'",
+        environment = {'HADOOP_LIBEXEC_DIR': '/usr/hdp/2.1.0.0-1234/hadoop/libexec'},
         only_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid")
 
     self.assertResourceCalled('File', '/var/run/hadoop/hdfs/hadoop-hdfs-datanode.pid', action = ['delete'])
@@ -346,19 +348,19 @@ class TestDatanode(RMFTestCase):
     self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               configurations = self.getConfig()['configurations']['hdfs-site'],
                               configuration_attributes = self.getConfig()['configuration_attributes']['hdfs-site']
                               )
     self.assertResourceCalled('XmlConfig', 'core-site.xml',
                               owner = 'hdfs',
                               group = 'hadoop',
-                              conf_dir = '/etc/hadoop/conf',
+                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
                               configurations = self.getConfig()['configurations']['core-site'],
                               configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
                               mode = 0644
                               )
-    self.assertResourceCalled('File', '/etc/hadoop/conf/slaves',
+    self.assertResourceCalled('File', '/usr/hdp/current/hadoop-client/conf/slaves',
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
@@ -390,7 +392,7 @@ class TestDatanode(RMFTestCase):
                               )
 
   def assert_configure_secured(self, stackVersion=STACK_VERSION, snappy_enabled=True):
-    conf_dir = '/etc/hadoop/conf'
+    conf_dir = '/usr/hdp/current/hadoop-client/conf'
     if stackVersion != self.STACK_VERSION:
       conf_dir = '/usr/hdp/current/hadoop-client/conf'
     

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index b26c8fb..06e12f6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -1358,6 +1358,7 @@ class TestNamenode(RMFTestCase):
   @patch("hdfs_namenode.is_this_namenode_active")
   @patch("resource_management.libraries.functions.setup_ranger_plugin_xml.setup_ranger_plugin")
   @patch("utils.get_namenode_states")
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_upgrade_restart_eu_with_ranger(self, get_namenode_states_mock, setup_ranger_plugin_mock, is_active_nn_mock):
     is_active_nn_mock.return_value = True
 
@@ -1614,6 +1615,7 @@ class TestNamenode(RMFTestCase):
     self.assertEquals("/usr/lib/hadoop/sbin", sys.modules["params"].hadoop_bin)
 
   @patch.object(shell, "call")
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_pre_upgrade_restart_22_params(self, call_mock):
     config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
     with open(config_file, "r") as f:
@@ -1634,7 +1636,7 @@ class TestNamenode(RMFTestCase):
                        call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
                        mocks_dict = mocks_dict)
     import sys
-    self.assertEquals("/usr/hdp/current/hadoop-client/conf", sys.modules["params"].hadoop_conf_dir)
+    self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
     self.assertEquals("/usr/hdp/{0}/hadoop/libexec".format(version), sys.modules["params"].hadoop_libexec_dir)
     self.assertEquals("/usr/hdp/{0}/hadoop/bin".format(version), sys.modules["params"].hadoop_bin_dir)
     self.assertEquals("/usr/hdp/{0}/hadoop/sbin".format(version), sys.modules["params"].hadoop_bin)
@@ -1670,6 +1672,7 @@ class TestNamenode(RMFTestCase):
 
 
   @patch("namenode_upgrade.create_upgrade_marker", MagicMock())
+  @patch("resource_management.core.sudo.path_isdir", new = MagicMock(return_value = True))
   def test_express_upgrade_skips_safemode_and_directory_creation(self):
     """
     Tests that we wait for Safemode to be OFF no matter what except for EU. And, because of that,

http://git-wip-us.apache.org/repos/asf/ambari/blob/31344fff/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
index 9486e11..1baed03 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_metastore.py
@@ -22,7 +22,19 @@ import os
 from mock.mock import MagicMock, call, patch
 from stacks.utils.RMFTestCase import *
 
+from resource_management.libraries.functions import stack_features
+
+# used for faking out stack features when the config files used by unit tests use older stacks
+def mock_stack_feature(stack_feature, stack_version):
+  if stack_feature == "rolling_upgrade":
+    return True
+  if stack_feature == "config_versioning":
+    return True
+
+  return False
+
 @patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output", new=MagicMock(return_value=(0,'123','')))
+@patch.object(stack_features, "check_stack_feature", new=MagicMock(side_effect=mock_stack_feature))
 class TestHiveMetastore(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
@@ -49,13 +61,13 @@ class TestHiveMetastore(RMFTestCase):
 
     self.assert_configure_default()
     self.assert_init_schema()
-    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
         environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'],
     )
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
         path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
@@ -110,13 +122,13 @@ class TestHiveMetastore(RMFTestCase):
     )
     self.assert_configure_secured()
     self.assert_init_schema()
-    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /etc/hive/conf.server /var/log/hive',
-        environment = {'HADOOP_HOME': '/usr/hdp/current/hadoop-client',
+    self.assertResourceCalled('Execute', '/tmp/start_metastore_script /var/log/hive/hive.out /var/log/hive/hive.err /var/run/hive/hive.pid /usr/hdp/current/hive-server2/conf/conf.server /var/log/hive',
+        environment = {'HADOOP_HOME': '/usr/hdp/2.1.0.0-1234/hadoop',
            'HIVE_BIN': 'hive',
            'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
         not_if = "ls /var/run/hive/hive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/bin'],
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/2.1.0.0-1234/hadoop/bin'],
     )
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
         path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
@@ -194,7 +206,7 @@ class TestHiveMetastore(RMFTestCase):
                               )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive/conf.server',
+                              conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
                               mode = 0600,
                               configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                      u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -202,7 +214,7 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner = 'hive',
                               group = 'hadoop',
@@ -257,7 +269,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
@@ -269,8 +281,8 @@ class TestHiveMetastore(RMFTestCase):
                               )
 
   def assert_init_schema(self):
-    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord \'!`"\'"\'"\' 1\' -verbose',
-        not_if = 'ambari-sudo.sh su hive -l -s /bin/bash -c \'[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/etc/hive/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord \'"\'"\'!`"\'"\'"\'"\'"\'"\'"\'"\'"\' 1\'"\'"\' -verbose\'',
+    self.assertResourceCalled('Execute', 'export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -initSchema -dbType mysql -userName hive -passWord \'!`"\'"\'"\' 1\' -verbose',
+        not_if = 'ambari-sudo.sh su hive -l -s /bin/bash -c \'[RMF_EXPORT_PLACEHOLDER]export HIVE_CONF_DIR=/usr/hdp/current/hive-server2/conf/conf.server ; /usr/hdp/current/hive-server2/bin/schematool -info -dbType mysql -userName hive -passWord \'"\'"\'!`"\'"\'"\'"\'"\'"\'"\'"\'"\' 1\'"\'"\' -verbose\'',
         user = 'hive',
     )
 
@@ -317,7 +329,7 @@ class TestHiveMetastore(RMFTestCase):
                               )
     self.assertResourceCalled('XmlConfig', 'hive-site.xml',
                               group = 'hadoop',
-                              conf_dir = '/etc/hive/conf.server',
+                              conf_dir = '/usr/hdp/current/hive-server2/conf/conf.server',
                               mode = 0600,
                               configuration_attributes = {u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
                                                                      u'javax.jdo.option.ConnectionDriverName': u'true',
@@ -325,7 +337,7 @@ class TestHiveMetastore(RMFTestCase):
                               owner = 'hive',
                               configurations = self.getConfig()['configurations']['hive-site'],
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hive-env.sh',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hive-env.sh',
                               content = InlineTemplate(self.getConfig()['configurations']['hive-env']['content']),
                               owner = 'hive',
                               group = 'hadoop',
@@ -342,7 +354,7 @@ class TestHiveMetastore(RMFTestCase):
                               group = 'root',
                               mode = 0644,
                               )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/zkmigrator_jaas.conf',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/zkmigrator_jaas.conf',
                               content = Template('zkmigrator_jaas.conf.j2'),
                               owner = 'hive',
                               group = 'hadoop',
@@ -382,7 +394,7 @@ class TestHiveMetastore(RMFTestCase):
     self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar',
         mode = 0644,
     )
-    self.assertResourceCalled('File', '/etc/hive/conf.server/hadoop-metrics2-hivemetastore.properties',
+    self.assertResourceCalled('File', '/usr/hdp/current/hive-server2/conf/conf.server/hadoop-metrics2-hivemetastore.properties',
                               owner = 'hive',
                               group = 'hadoop',
                               content = Template('hadoop-metrics2-hivemetastore.properties.j2'),
@@ -543,7 +555,7 @@ class TestHiveMetastore(RMFTestCase):
         environment = {'HADOOP_HOME': '/usr/hdp/2.3.0.0-1234/hadoop', 'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45', 'HIVE_BIN': '/usr/hdp/current/hive-server2/bin/hive'},
         not_if = None,
         user = 'hive',
-        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/current/hadoop-client/bin'])
+        path = ['/bin:/usr/hdp/current/hive-server2/bin:/usr/hdp/2.3.0.0-1234/hadoop/bin'])
 
     self.assertResourceCalled('Execute', '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/hdp/current/hive-server2/lib/mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive aaa com.mysql.jdbc.Driver',
         path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],

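The mock_stack_feature hook added at the top of this file is standard mock
usage: patching check_stack_feature with a side_effect callable lets the test
answer per feature name instead of returning one fixed value for every call.
A minimal self-contained sketch of the same technique, with a local stand-in
function and the standard-library mock (hypothetical names, for illustration):

    from unittest import mock

    def check_stack_feature(feature, version):
        # Stand-in for the real stack-feature lookup; tests replace it.
        raise NotImplementedError

    def fake_feature(feature, version):
        # A side_effect callable receives the same arguments as the patched
        # function, and its return value becomes the mock's result.
        return feature in ("rolling_upgrade", "config_versioning")

    with mock.patch(__name__ + ".check_stack_feature",
                    new=mock.MagicMock(side_effect=fake_feature)):
        assert check_stack_feature("rolling_upgrade", "2.3.0.0") is True
        assert check_stack_feature("unrelated_feature", "2.3.0.0") is False

This mirrors why the test configs for older stacks still exercise the
rolling-upgrade and config-versioning code paths above.
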

[07/50] [abbrv] ambari git commit: AMBARI-22041 - Intermittent bug in AMBARI-21933 patch causing wizard progress reporting to sometimes hang (Jason Golieb via jonathanhurley)

Posted by rl...@apache.org.
AMBARI-22041 - Intermittent bug in AMBARI-21933 patch causing wizard progress reporting to sometimes hang (Jason Golieb via jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/cb27cee5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/cb27cee5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/cb27cee5

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: cb27cee531f9321e87e2a32fcfa9879a1439340a
Parents: 1a90a52
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Tue Sep 26 14:52:41 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Tue Sep 26 14:52:41 2017 -0400

----------------------------------------------------------------------
 .../main/admin/highAvailability/progress_popup_controller.js     | 2 +-
 .../admin/highAvailability/progress_popup_controller_test.js     | 4 +---
 2 files changed, 2 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/cb27cee5/ambari-web/app/controllers/main/admin/highAvailability/progress_popup_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/main/admin/highAvailability/progress_popup_controller.js b/ambari-web/app/controllers/main/admin/highAvailability/progress_popup_controller.js
index 6e0ee5e..9d8f33e 100644
--- a/ambari-web/app/controllers/main/admin/highAvailability/progress_popup_controller.js
+++ b/ambari-web/app/controllers/main/admin/highAvailability/progress_popup_controller.js
@@ -180,7 +180,7 @@ App.HighAvailabilityProgressPopupController = Ember.Controller.extend({
     var hosts = [];
     var hostsMap = {};
     var popupTitle = this.get('popupTitle');
-    var id = data[0].Requests.id;
+    var id = this.get('requestIds')[0];
 
     data.forEach(function (request) {
       request.tasks.forEach(function (task) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/cb27cee5/ambari-web/test/controllers/main/admin/highAvailability/progress_popup_controller_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/main/admin/highAvailability/progress_popup_controller_test.js b/ambari-web/test/controllers/main/admin/highAvailability/progress_popup_controller_test.js
index 2742c44..b14bf86 100644
--- a/ambari-web/test/controllers/main/admin/highAvailability/progress_popup_controller_test.js
+++ b/ambari-web/test/controllers/main/admin/highAvailability/progress_popup_controller_test.js
@@ -343,9 +343,6 @@ describe('App.HighAvailabilityProgressPopupController', function () {
     it("calculate data", function() {
       var data = [
         {
-          Requests: {
-            id: 1
-          },
           tasks: [
             {
               Tasks: {
@@ -363,6 +360,7 @@ describe('App.HighAvailabilityProgressPopupController', function () {
         }
       ];
       controller.setProperties({
+        requestIds: [1],
         popupTitle: 'popupTitle'
       });
       controller.calculateHostsData(data);

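The one-line fix above changes where the popup gets its request id: from the
polled payload (data[0].Requests.id), which can arrive without the Requests
envelope, to the requestIds the controller already holds, which is why the
progress reporting could previously hang. A minimal sketch of the same idea,
written in Python with a hypothetical class purely for illustration (the
actual fix is the Ember change above):

    class ProgressPoller(object):
        def __init__(self, request_ids, popup_title):
            # Keep the ids the popup was opened with; polled task payloads
            # may omit the Requests envelope entirely.
            self.request_ids = request_ids
            self.popup_title = popup_title

        def calculate_hosts_data(self, data):
            # Key the bookkeeping off stored state, not the payload.
            request_id = self.request_ids[0]
            tasks = [task for request in data
                     for task in request.get('tasks', [])]
            return request_id, tasks

    # ProgressPoller([1], 'popupTitle').calculate_hosts_data([{'tasks': []}])
    # returns (1, []) even when no request carries an id field.
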

[50/50] [abbrv] ambari git commit: Merge branch 'trunk' into branch-feature-AMBARI-20859

Posted by rl...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-20859


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/17243c68
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/17243c68
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/17243c68

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 17243c68ed31bcc1abd618bffe0426d96da897b7
Parents: 3cefb74 19fe4cf
Author: Robert Levas <rl...@hortonworks.com>
Authored: Mon Oct 2 16:38:32 2017 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Mon Oct 2 16:38:32 2017 -0400

----------------------------------------------------------------------
 .../authentication/AuthenticationMainCtrl.js    |    4 +-
 .../stackVersions/StackVersionsCreateCtrl.js    |   15 +-
 .../stackVersions/StackVersionsEditCtrl.js      |    9 +-
 .../stackVersions/StackVersionsListCtrl.js      |   20 +
 .../ui/admin-web/app/scripts/i18n.config.js     |   10 +-
 .../ui/admin-web/app/scripts/services/Stack.js  |   23 +-
 .../admin-web/app/views/stackVersions/list.html |   14 +-
 .../views/stackVersions/stackVersionPage.html   |    8 +-
 ambari-agent/conf/unix/ambari-agent             |   20 +
 .../src/main/python/ambari_agent/HostInfo.py    |   29 +-
 .../python/ambari_agent/alerts/base_alert.py    |   15 +-
 .../python/ambari_agent/alerts/web_alert.py     |    4 +
 .../src/test/python/ambari_agent/TestAlerts.py  |   19 +-
 .../test/python/ambari_agent/TestHostInfo.py    |  275 +-
 .../resource_management/TestPackageResource.py  |    2 +-
 .../resource_management/TestPackagesAnalyzer.py |  187 -
 .../python/resource_management/TestScript.py    |   23 +-
 .../resource_management/TestUserResource.py     |    2 +-
 .../src/main/python/ambari_commons/shell.py     |  139 +-
 .../python/resource_management/core/base.py     |   11 +
 .../resource_management/core/files/killtree.sh  |    2 +-
 .../core/providers/__init__.py                  |   42 +-
 .../core/providers/accounts.py                  |    6 +-
 .../core/providers/package/__init__.py          |  250 +-
 .../core/providers/package/apt.py               |  289 +-
 .../core/providers/package/yumrpm.py            |  268 +-
 .../core/providers/package/zypper.py            |  147 +-
 .../core/resources/accounts.py                  |    4 +-
 .../resource_management/core/resources/klist.py |   45 +
 .../libraries/functions/check_process_status.py |   20 +
 .../libraries/functions/component_version.py    |   69 +
 .../libraries/functions/conf_select.py          |  292 +-
 .../libraries/functions/constants.py            |    3 +-
 .../libraries/functions/copy_tarball.py         |  173 +-
 .../libraries/functions/curl_krb_request.py     |    2 +-
 .../libraries/functions/packages_analyzer.py    |  356 -
 .../libraries/functions/repository_util.py      |   42 +-
 .../functions/setup_ranger_plugin_xml.py        |    1 +
 .../libraries/functions/stack_features.py       |   33 +-
 .../libraries/functions/stack_select.py         |  322 +-
 .../libraries/functions/stack_tools.py          |   27 +-
 .../libraries/functions/tar_archive.py          |   24 +-
 .../libraries/functions/upgrade_summary.py      |  146 +
 .../libraries/functions/version.py              |   17 +-
 .../libraries/functions/version_select_util.py  |   64 +-
 .../libraries/providers/hdfs_resource.py        |   48 +-
 .../libraries/script/script.py                  |   88 +-
 .../HDP/2.0.8/services/HDFS/kerberos.json       |   12 +-
 .../upgrades/upgrade_nonrolling_new_stack.xml   |    6 +-
 .../2.1.1/upgrades/upgrade_test_nonrolling.xml  |    4 +-
 ambari-infra/ambari-infra-assembly/pom.xml      |    8 +
 .../src/main/package/deb/manager/postinst       |    5 +
 .../src/main/package/deb/solr-client/postinst   |   13 +
 .../src/main/package/rpm/manager/postinstall.sh |   20 +
 .../main/package/rpm/solr-client/postinstall.sh |   28 +
 ambari-infra/ambari-infra-manager/.gitignore    |    2 +
 .../ambari-infra-manager/docker/bin/start.sh    |    2 +-
 ambari-infra/ambari-infra-manager/pom.xml       |  172 +-
 .../org/apache/ambari/infra/InfraManager.java   |  210 +-
 .../infra/common/InfraManagerConstants.java     |   31 -
 .../infra/conf/InfraManagerDataConfig.java      |   37 +
 .../conf/InfraManagerJerseyResourceConfig.java  |   37 +
 .../infra/conf/InfraManagerServletConfig.java   |   83 +
 .../infra/conf/StaticResourceConfiguration.java |   49 +
 .../conf/batch/InfraManagerBatchConfig.java     |   86 -
 .../infra/doc/InfraManagerApiDocStorage.java    |   90 +
 .../ambari/infra/job/dummy/DummyItemWriter.java |   15 +-
 .../infra/job/dummy/DummyJobConfiguration.java  |  108 +
 .../ambari/infra/rest/ApiDocResource.java       |   56 +
 .../src/main/resources/infra-manager.properties |    4 +-
 .../src/main/resources/infraManager.sh          |   10 +-
 .../src/main/resources/log4j.xml                |   31 -
 .../src/main/resources/log4j2.xml               |   41 +
 .../src/main/resources/static/index.html        |   24 +
 .../src/main/resources/swagger/swagger.html     |   36 +-
 .../src/main/resources/webapp/index.html        |   24 -
 ambari-infra/ambari-infra-solr-client/build.xml |    4 +
 ambari-infra/ambari-infra-solr-client/pom.xml   |    5 +
 .../ambari/infra/solr/AmbariSolrCloudCLI.java   |   12 +-
 .../apache/ambari/infra/solr/S3Uploader.java    |   64 +
 .../src/main/python/solrDataManager.py          |  735 ++
 .../src/main/resources/log4j.properties         |    6 +-
 .../src/main/resources/solrCloudCli.sh          |    8 +-
 .../src/main/resources/solrIndexHelper.sh       |   19 +-
 .../ambari-logsearch-assembly/pom.xml           |    3 +-
 .../logsearch/config/api/LogSearchConfig.java   |   94 -
 .../config/api/LogSearchConfigFactory.java      |   61 +-
 .../config/api/LogSearchConfigLogFeeder.java    |   77 +
 .../config/api/LogSearchConfigServer.java       |  111 +
 .../config/api/OutputConfigMonitor.java         |   44 +
 .../model/outputconfig/OutputProperties.java    |   23 +
 .../outputconfig/OutputSolrProperties.java      |   26 +
 .../config/api/LogSearchConfigClass1.java       |   78 -
 .../config/api/LogSearchConfigClass2.java       |   78 -
 .../config/api/LogSearchConfigFactoryTest.java  |   51 +-
 .../api/LogSearchConfigLogFeederClass1.java     |   58 +
 .../api/LogSearchConfigLogFeederClass2.java     |   58 +
 .../config/api/LogSearchConfigServerClass1.java |   76 +
 .../config/api/LogSearchConfigServerClass2.java |   76 +
 .../zookeeper/LogSearchConfigLogFeederZK.java   |  228 +
 .../zookeeper/LogSearchConfigServerZK.java      |  138 +
 .../config/zookeeper/LogSearchConfigZK.java     |  236 +-
 .../impl/MapFieldNameDescriptorImpl.java        |    2 +-
 .../impl/OutputSolrPropertiesImpl.java          |   46 +
 .../logsearch/steps/LogSearchUISteps.java       |    4 +-
 .../resources/stories/selenium/login.ui.story   |    4 +-
 .../ambari-logsearch-logfeeder/build.properties |    2 +-
 .../org/apache/ambari/logfeeder/LogFeeder.java  |   22 +-
 .../ambari/logfeeder/common/ConfigHandler.java  |   38 +-
 .../logfeeder/common/LogEntryParseTester.java   |    2 +-
 .../logfeeder/input/AbstractInputFile.java      |    4 +-
 .../apache/ambari/logfeeder/input/Input.java    |   70 +-
 .../logfeeder/input/InputConfigUploader.java    |   27 +-
 .../ambari/logfeeder/input/InputManager.java    |   28 +-
 .../ambari/logfeeder/input/InputSimulate.java   |   78 +-
 .../loglevelfilter/LogLevelFilterHandler.java   |   29 +-
 .../logfeeder/metrics/LogFeederAMSClient.java   |   45 +-
 .../apache/ambari/logfeeder/output/Output.java  |   36 +-
 .../ambari/logfeeder/output/OutputHDFSFile.java |    3 +-
 .../ambari/logfeeder/output/OutputManager.java  |   11 +
 .../ambari/logfeeder/output/OutputS3File.java   |    3 +-
 .../ambari/logfeeder/output/OutputSolr.java     |  223 +-
 .../logfeeder/util/LogFeederPropertiesUtil.java |  498 +
 .../ambari/logfeeder/util/LogFeederUtil.java    |  165 +-
 .../apache/ambari/logfeeder/util/SSLUtil.java   |   14 +-
 .../src/main/resources/grok-patterns            |    2 +-
 .../logconfig/LogConfigHandlerTest.java         |    4 +-
 .../logfeeder/metrics/MetricsManagerTest.java   |    4 +-
 .../ambari/logfeeder/output/OutputSolrTest.java |   29 +-
 .../ambari-logsearch-server/build.properties    |    2 +-
 .../ambari-logsearch-server/build.xml           |    3 +
 .../ambari-logsearch-server/pom.xml             |  471 +-
 ambari-logsearch/ambari-logsearch-server/run.sh |    6 +-
 .../org/apache/ambari/logsearch/LogSearch.java  |  181 +-
 .../auth/filter/AbstractJWTFilter.java          |  193 +
 .../auth/model/JWTAuthenticationToken.java      |   47 +
 .../logsearch/common/ExternalServerClient.java  |   24 +-
 .../logsearch/common/LogSearchConstants.java    |    2 +-
 .../logsearch/common/PropertiesHelper.java      |  124 -
 .../logsearch/common/XMLPropertiesHelper.java   |   79 -
 .../ambari/logsearch/conf/ApiDocConfig.java     |    4 +-
 .../logsearch/conf/ApplicationConfig.java       |    9 +-
 .../conf/LogSearchConfigMapHolder.java          |   61 +
 .../logsearch/conf/LogSearchHttpConfig.java     |   83 +
 .../conf/LogSearchJerseyResourceConfig.java     |   37 +
 .../logsearch/conf/LogSearchServletConfig.java  |   95 +
 .../logsearch/conf/LogSearchSpnegoConfig.java   |  173 +
 .../logsearch/conf/LogSearchSslConfig.java      |   86 +
 .../ambari/logsearch/conf/SecurityConfig.java   |   36 +-
 .../logsearch/conf/SolrAuditLogPropsConfig.java |    5 +
 .../conf/SolrEventHistoryPropsConfig.java       |    5 +
 .../ambari/logsearch/conf/SolrPropsConfig.java  |    2 +
 .../conf/SolrServiceLogPropsConfig.java         |    5 +
 .../conf/StaticResourceConfiguration.java       |   49 +
 .../configurer/LogSearchConfigConfigurer.java   |   24 +-
 .../configurer/SolrAuditAliasConfigurer.java    |    4 +-
 .../configurer/SolrCollectionConfigurer.java    |   13 +-
 .../logsearch/configurer/SslConfigurer.java     |  363 +
 .../ambari/logsearch/dao/SolrDaoBase.java       |   22 +-
 .../logsearch/dao/SolrSchemaFieldDao.java       |    3 +-
 .../ambari/logsearch/doc/DocConstants.java      |    1 +
 .../handler/AbstractSolrConfigHandler.java      |    2 +
 .../handler/CreateCollectionHandler.java        |   44 +-
 .../handler/ReloadCollectionHandler.java        |    5 +-
 .../health/AbstractSolrHealthIndicator.java     |   59 +
 .../health/SolrAuditLogsHealthIndicator.java    |   38 +
 .../health/SolrEventHistoryHealthIndicator.java |   37 +
 .../health/SolrServiceLogsHealthIndicator.java  |   38 +
 .../ambari/logsearch/manager/InfoManager.java   |   19 +
 .../logsearch/manager/ShipperConfigManager.java |   24 +-
 .../logsearch/model/common/LSServerFilter.java  |   10 +-
 .../ambari/logsearch/rest/InfoResource.java     |    8 +
 .../logsearch/solr/ResponseDataGenerator.java   |    3 +-
 .../apache/ambari/logsearch/util/SSLUtil.java   |  388 -
 .../apache/ambari/logsearch/util/WebUtil.java   |   65 -
 .../LogsearchLogoutSuccessHandler.java          |    1 -
 .../web/filters/LogsearchJWTFilter.java         |  150 +-
 .../LogsearchKRBAuthenticationFilter.java       |  107 +-
 .../web/listener/LogSearchSessionListener.java  |    4 +-
 .../web/model/JWTAuthenticationToken.java       |   53 -
 .../logsearch/web/security/LdapProperties.java  |  365 -
 .../web/security/LdapPropertyName.java          |   58 -
 .../ambari/logsearch/web/security/LdapUtil.java |  115 -
 ...LogsearchAbstractAuthenticationProvider.java |    2 +-
 .../LogsearchAuthenticationProvider.java        |    4 -
 .../LogsearchLdapAuthenticationProvider.java    |  187 -
 .../LogsearchLdapBindAuthenticator.java         |   46 -
 .../src/main/resources/default.properties       |   24 +-
 .../src/main/resources/info.properties          |   17 +
 .../src/main/resources/log4j.xml                |  118 +-
 .../src/main/resources/logsearch-admin-site.xml |  116 -
 .../src/main/resources/logsearch.properties     |    6 +-
 .../src/main/resources/swagger/swagger.html     |   36 +-
 .../src/main/scripts/run.sh                     |    3 +-
 .../LogsearchAuthenticationProviderTest.java    |   68 +-
 ...LogsearchLdapAuthenticationProviderTest.java |   61 -
 .../ambari-logsearch-web/.angular-cli.json      |   42 +
 ambari-logsearch/ambari-logsearch-web/.bowerrc  |    3 -
 .../ambari-logsearch-web/.editorconfig          |   13 +
 .../ambari-logsearch-web/.gitignore             |   43 +-
 .../ambari-logsearch-web/Gruntfile.js           |  139 -
 .../ambari-logsearch-web/LICENSE.txt            |  339 -
 ambari-logsearch/ambari-logsearch-web/README.md |   28 +
 .../ambari-logsearch-web/bower.json             |   39 -
 .../ambari-logsearch-web/build.properties       |   12 -
 ambari-logsearch/ambari-logsearch-web/build.xml |   37 -
 .../ambari-logsearch-web/e2e/app.e2e-spec.ts    |   32 +
 .../ambari-logsearch-web/e2e/app.po.ts          |   29 +
 .../ambari-logsearch-web/e2e/tsconfig.e2e.json  |   12 +
 .../ambari-logsearch-web/karma.conf.js          |   62 +
 .../ambari-logsearch-web/package.json           |   99 +-
 ambari-logsearch/ambari-logsearch-web/pom.xml   |  143 +-
 .../ambari-logsearch-web/protractor.conf.js     |   48 +
 .../ambari-logsearch-web/src/app/app.module.ts  |  185 +
 .../classes/active-service-log-entry.class.ts   |   23 +
 .../src/app/classes/list-item.class.ts          |   26 +
 .../queries/audit-logs-query-params.class.ts    |   46 +
 .../app/classes/queries/query-params.class.ts   |   23 +
 ...ce-logs-histogram-query-params.class.spec.ts |  203 +
 ...service-logs-histogram-query-params.class.ts |   70 +
 .../queries/service-logs-query-params.class.ts  |   30 +
 ...service-logs-truncated-query-params.class.ts |   36 +
 .../classes/service-log-context-entry.class.ts  |   26 +
 .../accordion-panel.component.html              |   25 +
 .../accordion-panel.component.less              |   42 +
 .../accordion-panel.component.spec.ts           |   42 +
 .../accordion-panel.component.ts                |   33 +
 .../src/app/components/app.component.html       |   34 +
 .../src/app/components/app.component.less       |   51 +
 .../src/app/components/app.component.spec.ts    |   61 +
 .../src/app/components/app.component.ts         |   45 +
 .../date-picker/date-picker.component.html      |   25 +
 .../date-picker/date-picker.component.spec.ts   |   51 +
 .../date-picker/date-picker.component.ts        |   73 +
 .../dropdown-button.component.html              |   31 +
 .../dropdown-button.component.less              |   36 +
 .../dropdown-button.component.spec.ts           |   97 +
 .../dropdown-button.component.ts                |  106 +
 .../dropdown-list/dropdown-list.component.html  |   34 +
 .../dropdown-list/dropdown-list.component.less  |   41 +
 .../dropdown-list.component.spec.ts             |  128 +
 .../dropdown-list/dropdown-list.component.ts    |   71 +
 .../filter-button.component.spec.ts             |   97 +
 .../filter-button/filter-button.component.ts    |   81 +
 .../filter-dropdown.component.spec.ts           |  107 +
 .../filter-dropdown.component.ts                |   63 +
 .../filters-panel/filters-panel.component.html  |   50 +
 .../filters-panel/filters-panel.component.less  |   69 +
 .../filters-panel.component.spec.ts             |  110 +
 .../filters-panel/filters-panel.component.ts    |   96 +
 .../log-context/log-context.component.html      |   33 +
 .../log-context/log-context.component.less      |   23 +
 .../log-context/log-context.component.spec.ts   |  108 +
 .../log-context/log-context.component.ts        |   91 +
 .../log-file-entry.component.html               |   20 +
 .../log-file-entry.component.less               |   31 +
 .../log-file-entry.component.spec.ts            |   56 +
 .../log-file-entry/log-file-entry.component.ts  |   51 +
 .../login-form/login-form.component.html        |   33 +
 .../login-form/login-form.component.less        |   22 +
 .../login-form/login-form.component.spec.ts     |  111 +
 .../login-form/login-form.component.ts          |   59 +
 .../logs-container.component.html               |   29 +
 .../logs-container.component.less               |   29 +
 .../logs-container.component.spec.ts            |  107 +
 .../logs-container/logs-container.component.ts  |  124 +
 .../logs-list/logs-list.component.html          |   65 +
 .../logs-list/logs-list.component.less          |  109 +
 .../logs-list/logs-list.component.spec.ts       |   95 +
 .../components/logs-list/logs-list.component.ts |  150 +
 .../main-container.component.html               |   31 +
 .../main-container.component.less               |   28 +
 .../main-container.component.spec.ts            |   76 +
 .../main-container/main-container.component.ts  |   90 +
 .../menu-button/menu-button.component.html      |   28 +
 .../menu-button/menu-button.component.less      |   33 +
 .../menu-button/menu-button.component.spec.ts   |  171 +
 .../menu-button/menu-button.component.ts        |   98 +
 .../app/components/modal/modal.component.html   |   40 +
 .../components/modal/modal.component.spec.ts    |   45 +
 .../src/app/components/modal/modal.component.ts |  122 +
 .../components/node-bar/node-bar.component.html |   19 +
 .../components/node-bar/node-bar.component.less |   39 +
 .../node-bar/node-bar.component.spec.ts         |   43 +
 .../components/node-bar/node-bar.component.ts   |   35 +
 .../pagination-controls.component.html          |   23 +
 .../pagination-controls.component.less          |   23 +
 .../pagination-controls.component.spec.ts       |   43 +
 .../pagination-controls.component.ts            |   73 +
 .../pagination/pagination.component.html        |   24 +
 .../pagination/pagination.component.less        |   28 +
 .../pagination/pagination.component.spec.ts     |   52 +
 .../pagination/pagination.component.ts          |   72 +
 .../search-box/search-box.component.html        |   32 +
 .../search-box/search-box.component.less        |  122 +
 .../search-box/search-box.component.spec.ts     |   51 +
 .../search-box/search-box.component.ts          |  212 +
 .../time-histogram.component.html               |   18 +
 .../time-histogram.component.less               |   29 +
 .../time-histogram.component.spec.ts            |   53 +
 .../time-histogram/time-histogram.component.ts  |  161 +
 .../time-range-picker.component.html            |   41 +
 .../time-range-picker.component.less            |   35 +
 .../time-range-picker.component.spec.ts         |   73 +
 .../time-range-picker.component.ts              |  103 +
 .../timezone-picker.component.html              |   26 +
 .../timezone-picker.component.less              |   45 +
 .../timezone-picker.component.spec.ts           |   99 +
 .../timezone-picker.component.ts                |   77 +
 .../components/top-menu/top-menu.component.html |   21 +
 .../components/top-menu/top-menu.component.less |   22 +
 .../top-menu/top-menu.component.spec.ts         |   47 +
 .../components/top-menu/top-menu.component.ts   |  122 +
 .../src/app/components/variables.less           |  170 +
 .../ambari-logsearch-web/src/app/mock-data.ts   | 1067 +++
 .../src/app/models/app-settings.model.ts        |   27 +
 .../src/app/models/app-state.model.ts           |   43 +
 .../src/app/models/audit-log-field.model.ts     |  225 +
 .../src/app/models/audit-log.model.ts           |   46 +
 .../src/app/models/bar-graph.model.ts           |   24 +
 .../src/app/models/common-entry.model.ts        |   22 +
 .../src/app/models/count.model.ts               |   22 +
 .../src/app/models/filter.model.ts              |   25 +
 .../src/app/models/graph.model.ts               |   23 +
 .../src/app/models/log-field.model.ts           |   27 +
 .../src/app/models/log.model.ts                 |   38 +
 .../src/app/models/node.model.ts                |   29 +
 .../src/app/models/service-log-field.model.ts   |  107 +
 .../src/app/models/service-log.model.ts         |   27 +
 .../app/models/solr-collection-state.model.ts   |   23 +
 .../src/app/models/store.model.ts               |  180 +
 .../src/app/models/user-config.model.ts         |   26 +
 .../src/app/pipes/timer-seconds.pipe.spec.ts    |   37 +
 .../src/app/pipes/timer-seconds.pipe.ts         |   41 +
 .../src/app/pipes/timezone-abbr.pipe.spec.ts    |   26 +
 .../src/app/pipes/timezone-abbr.pipe.ts         |   31 +
 .../services/component-actions.service.spec.ts  |   91 +
 .../app/services/component-actions.service.ts   |  118 +
 .../component-generator.service.spec.ts         |   90 +
 .../app/services/component-generator.service.ts |   57 +
 .../src/app/services/filtering.service.spec.ts  |   67 +
 .../src/app/services/filtering.service.ts       |  531 ++
 .../app/services/http-client.service.spec.ts    |   67 +
 .../src/app/services/http-client.service.ts     |  143 +
 .../app/services/logs-container.service.spec.ts |   88 +
 .../src/app/services/logs-container.service.ts  |  199 +
 .../app/services/mock-api-data.service.spec.ts  |   81 +
 .../src/app/services/mock-api-data.service.ts   |  178 +
 .../services/storage/app-settings.service.ts    |   33 +
 .../app/services/storage/app-state.service.ts   |   33 +
 .../storage/audit-logs-fields.service.ts        |   32 +
 .../app/services/storage/audit-logs.service.ts  |   32 +
 .../app/services/storage/clusters.service.ts    |   32 +
 .../app/services/storage/components.service.ts  |   32 +
 .../src/app/services/storage/filters.service.ts |   33 +
 .../src/app/services/storage/graphs.service.ts  |   33 +
 .../src/app/services/storage/hosts.service.ts   |   32 +
 .../app/services/storage/reducers.service.ts    |   54 +
 .../storage/service-logs-fields.service.ts      |   32 +
 .../service-logs-histogram-data.service.ts      |   32 +
 .../storage/service-logs-truncated.service.ts   |   32 +
 .../services/storage/service-logs.service.ts    |   32 +
 .../services/storage/user-configs.service.ts    |   33 +
 .../src/app/services/utils.service.spec.ts      |   86 +
 .../src/app/services/utils.service.ts           |   63 +
 .../src/app/test-config.spec.ts                 |   36 +
 .../src/assets/i18n/en.json                     |  150 +
 .../src/environments/environment.prod.ts        |   21 +
 .../src/environments/environment.ts             |   21 +
 .../ambari-logsearch-web/src/favicon.ico        |  Bin 0 -> 1150 bytes
 .../ambari-logsearch-web/src/index.html         |   31 +
 .../ambari-logsearch-web/src/main.ts            |   29 +
 .../src/main/webapp/404.html                    |  170 -
 .../src/main/webapp/ajax_failure.jsp            |   31 -
 .../src/main/webapp/ajax_success.html           |   26 -
 .../src/main/webapp/favicon.ico                 |  Bin 1150 -> 0 bytes
 .../webapp/fonts/fontawesome/FontAwesome.otf    |  Bin 61896 -> 0 bytes
 .../fonts/fontawesome/fontawesome-webfont.eot   |  Bin 37405 -> 0 bytes
 .../fonts/fontawesome/fontawesome-webfont.svg   |  415 -
 .../fonts/fontawesome/fontawesome-webfont.ttf   |  Bin 79076 -> 0 bytes
 .../fonts/fontawesome/fontawesome-webfont.woff  |  Bin 43572 -> 0 bytes
 .../fonts/fontopensans/open-sans-300.woff       |  Bin 22656 -> 0 bytes
 .../fonts/fontopensans/open-sans-300i.woff      |  Bin 21524 -> 0 bytes
 .../fonts/fontopensans/open-sans-400.woff       |  Bin 21956 -> 0 bytes
 .../fonts/fontopensans/open-sans-400i.woff      |  Bin 21092 -> 0 bytes
 .../fonts/fontopensans/open-sans-600.woff       |  Bin 22604 -> 0 bytes
 .../fonts/fontopensans/open-sans-600i.woff      |  Bin 21252 -> 0 bytes
 .../fonts/fontopensans/open-sans-700.woff       |  Bin 22748 -> 0 bytes
 .../fonts/fontopensans/open-sans-700i.woff      |  Bin 21184 -> 0 bytes
 .../main/webapp/images/System-Settings-icon.png |  Bin 17540 -> 0 bytes
 .../src/main/webapp/images/avatar.png           |  Bin 761 -> 0 bytes
 .../src/main/webapp/images/blank.gif            |  Bin 43 -> 0 bytes
 .../src/main/webapp/images/blockLoading.gif     |  Bin 3209 -> 0 bytes
 .../src/main/webapp/images/body-bg.png          |  Bin 3375 -> 0 bytes
 .../src/main/webapp/images/cancel_search.png    |  Bin 541 -> 0 bytes
 .../src/main/webapp/images/clear.png            |  Bin 509 -> 0 bytes
 .../src/main/webapp/images/comp.png             |  Bin 1603 -> 0 bytes
 .../src/main/webapp/images/data-grey.png        |  Bin 1468 -> 0 bytes
 .../main/webapp/images/database_table_32.png    |  Bin 1329 -> 0 bytes
 .../src/main/webapp/images/error-404-icon.png   |  Bin 2529 -> 0 bytes
 .../src/main/webapp/images/error-500-icon.png   |  Bin 2986 -> 0 bytes
 .../src/main/webapp/images/folder-grey.png      |  Bin 1372 -> 0 bytes
 .../src/main/webapp/images/folder.png           |  Bin 920 -> 0 bytes
 .../src/main/webapp/images/folder2.png          |  Bin 946 -> 0 bytes
 .../src/main/webapp/images/form.png             |  Bin 3329 -> 0 bytes
 .../src/main/webapp/images/hbase.jpg            |  Bin 4575 -> 0 bytes
 .../src/main/webapp/images/hdfs.jpg             |  Bin 5232 -> 0 bytes
 .../src/main/webapp/images/hive.png             |  Bin 20483 -> 0 bytes
 .../src/main/webapp/images/hline.png            |  Bin 99 -> 0 bytes
 .../src/main/webapp/images/icon-collapse.png    |  Bin 18815 -> 0 bytes
 .../src/main/webapp/images/icon-expand.png      |  Bin 20092 -> 0 bytes
 .../src/main/webapp/images/knox.gif             |  Bin 2347 -> 0 bytes
 .../src/main/webapp/images/knox1.png            |  Bin 34126 -> 0 bytes
 .../src/main/webapp/images/loading.gif          |  Bin 1849 -> 0 bytes
 .../src/main/webapp/images/loading_game.gif     |  Bin 16098 -> 0 bytes
 .../src/main/webapp/images/logo-white.png       |  Bin 4538 -> 0 bytes
 .../src/main/webapp/images/logo.png             |  Bin 1168 -> 0 bytes
 .../src/main/webapp/images/router-grey.png      |  Bin 1533 -> 0 bytes
 .../src/main/webapp/images/search_glyph.png     |  Bin 420 -> 0 bytes
 .../src/main/webapp/images/sortingCollapse.png  |  Bin 13320 -> 0 bytes
 .../src/main/webapp/images/sortingExpand.png    |  Bin 13283 -> 0 bytes
 .../src/main/webapp/images/storm.png            |  Bin 6174 -> 0 bytes
 .../src/main/webapp/images/storm1.png           |  Bin 19620 -> 0 bytes
 .../src/main/webapp/images/task-grey.png        |  Bin 1187 -> 0 bytes
 .../src/main/webapp/images/timezone.png         |  Bin 353544 -> 0 bytes
 .../src/main/webapp/index.html                  |  108 -
 .../backbone-pageable/backbone-pageable.min.js  |    8 -
 .../webapp/libs/bower/backbone.fetch-cache.js   |  326 -
 .../src/main/webapp/libs/bower/d3/d3.tip.js     |  293 -
 .../webapp/libs/bower/dashboard/dashboard.js    |  393 -
 .../bower/font-awesome/css/font-awesome.css     | 1801 ----
 .../bower/font-awesome/css/font-awesome.min.css |    4 -
 .../bower/font-awesome/fonts/FontAwesome.otf    |  Bin 61896 -> 0 bytes
 .../font-awesome/fonts/fontawesome-webfont.eot  |  Bin 37405 -> 0 bytes
 .../font-awesome/fonts/fontawesome-webfont.svg  |  415 -
 .../font-awesome/fonts/fontawesome-webfont.ttf  |  Bin 79076 -> 0 bytes
 .../font-awesome/fonts/fontawesome-webfont.woff |  Bin 43572 -> 0 bytes
 .../fonts/fontawesome-webfont.woff2             |  Bin 56780 -> 0 bytes
 .../libs/bower/font-awesome/less/animated.less  |   34 -
 .../font-awesome/less/bordered-pulled.less      |   16 -
 .../libs/bower/font-awesome/less/core.less      |   13 -
 .../bower/font-awesome/less/fixed-width.less    |    6 -
 .../bower/font-awesome/less/font-awesome.less   |   17 -
 .../libs/bower/font-awesome/less/icons.less     |  596 --
 .../libs/bower/font-awesome/less/larger.less    |   13 -
 .../libs/bower/font-awesome/less/list.less      |   19 -
 .../libs/bower/font-awesome/less/mixins.less    |   27 -
 .../libs/bower/font-awesome/less/path.less      |   15 -
 .../font-awesome/less/rotated-flipped.less      |   20 -
 .../libs/bower/font-awesome/less/stacked.less   |   20 -
 .../libs/bower/font-awesome/less/variables.less |  606 --
 .../libs/bower/font-awesome/scss/_animated.scss |   34 -
 .../font-awesome/scss/_bordered-pulled.scss     |   16 -
 .../libs/bower/font-awesome/scss/_core.scss     |   13 -
 .../bower/font-awesome/scss/_fixed-width.scss   |    6 -
 .../libs/bower/font-awesome/scss/_icons.scss    |  596 --
 .../libs/bower/font-awesome/scss/_larger.scss   |   13 -
 .../libs/bower/font-awesome/scss/_list.scss     |   19 -
 .../libs/bower/font-awesome/scss/_mixins.scss   |   27 -
 .../libs/bower/font-awesome/scss/_path.scss     |   15 -
 .../font-awesome/scss/_rotated-flipped.scss     |   20 -
 .../libs/bower/font-awesome/scss/_stacked.scss  |   20 -
 .../bower/font-awesome/scss/_variables.scss     |  606 --
 .../bower/font-awesome/scss/font-awesome.scss   |   17 -
 .../jquery-ui/css/images/animated-overlay.gif   |  Bin 1738 -> 0 bytes
 .../ui-bg_diagonals-thick_18_b81900_40x40.png   |  Bin 418 -> 0 bytes
 .../ui-bg_diagonals-thick_20_666666_40x40.png   |  Bin 312 -> 0 bytes
 .../css/images/ui-bg_flat_0_aaaaaa_40x100.png   |  Bin 180 -> 0 bytes
 .../css/images/ui-bg_flat_10_000000_40x100.png  |  Bin 205 -> 0 bytes
 .../css/images/ui-bg_glass_100_f6f6f6_1x400.png |  Bin 262 -> 0 bytes
 .../css/images/ui-bg_glass_100_fdf5ce_1x400.png |  Bin 348 -> 0 bytes
 .../css/images/ui-bg_glass_65_ffffff_1x400.png  |  Bin 207 -> 0 bytes
 .../ui-bg_gloss-wave_35_f6a828_500x100.png      |  Bin 5815 -> 0 bytes
 .../ui-bg_highlight-soft_100_eeeeee_1x100.png   |  Bin 278 -> 0 bytes
 .../ui-bg_highlight-soft_75_ffe45c_1x100.png    |  Bin 328 -> 0 bytes
 .../css/images/ui-icons_222222_256x240.png      |  Bin 6922 -> 0 bytes
 .../css/images/ui-icons_228ef1_256x240.png      |  Bin 4549 -> 0 bytes
 .../css/images/ui-icons_454545_256x240.png      |  Bin 4369 -> 0 bytes
 .../css/images/ui-icons_888888_256x240.png      |  Bin 4369 -> 0 bytes
 .../css/images/ui-icons_ef8c08_256x240.png      |  Bin 4549 -> 0 bytes
 .../css/images/ui-icons_ffd27a_256x240.png      |  Bin 4549 -> 0 bytes
 .../css/images/ui-icons_ffffff_256x240.png      |  Bin 6299 -> 0 bytes
 .../bower/jquery-ui/css/jquery-ui-1.11.4.css    |  850 --
 .../jquery-ui/css/jquery-ui-1.11.4.min.css      |    7 -
 .../bower/jquery-ui/css/jquery-ui-bootstrap.css | 2549 -----
 .../libs/bower/jquery-ui/js/jquery-ui-1.11.4.js | 8772 ------------------
 .../bower/jquery-ui/js/jquery-ui-1.11.4.min.js  |   10 -
 .../src/main/webapp/libs/bower/loadCSS.js       |   73 -
 .../require-handlebars-plugin/Handlebars.js     | 2752 ------
 .../webapp/libs/bower/select2/select2x2.png     |  Bin 845 -> 0 bytes
 .../libs/bower/sparkline/jquery.sparkline.js    | 3054 ------
 .../Backgrid.ColumnManager.css                  |  114 -
 .../Backgrid.ColumnManager.js                   | 1045 ---
 .../daterangepicker/css/daterangepicker.css     |  415 -
 .../daterangepicker/js/daterangepicker.js       | 1560 ----
 .../libs/custom/timezone/WorldMapGenerator.js   | 3474 -------
 .../libs/custom/timezone/jstz-1.0.4.min.js      |    2 -
 .../webapp/libs/custom/visualsearch/.bower.json |   45 -
 .../custom/visualsearch/models/search_facets.js |   67 -
 .../custom/visualsearch/models/search_query.js  |   70 -
 .../visualsearch/templates/search_box.jst       |    8 -
 .../visualsearch/templates/search_facet.jst     |    9 -
 .../visualsearch/templates/search_input.jst     |    1 -
 .../custom/visualsearch/templates/templates.js  |    7 -
 .../visualsearch/utils/backbone_extensions.js   |   17 -
 .../libs/custom/visualsearch/utils/hotkeys.js   |   99 -
 .../libs/custom/visualsearch/utils/inflector.js |   21 -
 .../visualsearch/utils/jquery_extensions.js     |  197 -
 .../custom/visualsearch/utils/search_parser.js  |   87 -
 .../custom/visualsearch/views/search_box.js     |  458 -
 .../custom/visualsearch/views/search_facet.js   |  442 -
 .../custom/visualsearch/views/search_input.js   |  418 -
 .../libs/custom/visualsearch/visualsearch.js    | 1984 ----
 .../src/main/webapp/login.html                  |  154 -
 .../src/main/webapp/robots.txt                  |    3 -
 .../src/main/webapp/scripts/App.js              |   86 -
 .../src/main/webapp/scripts/Init.js             |  196 -
 .../src/main/webapp/scripts/Main.js             |   35 -
 .../src/main/webapp/scripts/RegionManager.js    |   83 -
 .../collection_bases/VAuditLogListBase.js       |   73 -
 .../collection_bases/VEventHistoryListBase.js   |   87 -
 .../scripts/collection_bases/VGroupListBase.js  |   82 -
 .../collection_bases/VLogLevelListBase.js       |   96 -
 .../scripts/collection_bases/VLogListBase.js    |  127 -
 .../collection_bases/VNameValueListBase.js      |   57 -
 .../scripts/collection_bases/VNodeListBase.js   |   74 -
 .../scripts/collections/BaseCollection.js       |  172 -
 .../scripts/collections/SchemaFieldList.js      |   29 -
 .../webapp/scripts/collections/VAuditLogList.js |   36 -
 .../scripts/collections/VEventHistoryList.js    |   33 -
 .../webapp/scripts/collections/VGroupList.js    |   36 -
 .../webapp/scripts/collections/VLogLevelList.js |   36 -
 .../main/webapp/scripts/collections/VLogList.js |   36 -
 .../scripts/collections/VNameValueList.js       |   36 -
 .../webapp/scripts/collections/VNodeList.js     |   36 -
 .../src/main/webapp/scripts/communicator.js     |   61 -
 .../webapp/scripts/controllers/Controller.js    |  240 -
 .../webapp/scripts/model_bases/VAuditLogBase.js |   61 -
 .../scripts/model_bases/VCommonModelBase.js     |   51 -
 .../scripts/model_bases/VEventHistoryBase.js    |   51 -
 .../scripts/model_bases/VGraphInfoBase.js       |   62 -
 .../webapp/scripts/model_bases/VGroupBase.js    |   51 -
 .../main/webapp/scripts/model_bases/VLogBase.js |   51 -
 .../webapp/scripts/model_bases/VLogLevelBase.js |   51 -
 .../scripts/model_bases/VNameValueBase.js       |   51 -
 .../webapp/scripts/model_bases/VNodeBase.js     |   51 -
 .../scripts/model_bases/VUserFilterBase.js      |   51 -
 .../src/main/webapp/scripts/models/BaseModel.js |  110 -
 .../src/main/webapp/scripts/models/VAppState.js |   44 -
 .../src/main/webapp/scripts/models/VAuditLog.js |   47 -
 .../main/webapp/scripts/models/VCommonModel.js  |   48 -
 .../main/webapp/scripts/models/VEventHistory.js |   48 -
 .../main/webapp/scripts/models/VGraphInfo.js    |   53 -
 .../src/main/webapp/scripts/models/VGroup.js    |   53 -
 .../src/main/webapp/scripts/models/VLog.js      |   66 -
 .../src/main/webapp/scripts/models/VLogLevel.js |   47 -
 .../main/webapp/scripts/models/VNameValue.js    |   47 -
 .../src/main/webapp/scripts/models/VNode.js     |   48 -
 .../main/webapp/scripts/models/VUserFilter.js   |   51 -
 .../main/webapp/scripts/modules/Overrides.js    |  234 -
 .../src/main/webapp/scripts/modules/Vent.js     |   26 -
 .../scripts/modules/globalize/message/en.js     |   79 -
 .../main/webapp/scripts/prelogin/Prelogin.js    |  130 -
 .../src/main/webapp/scripts/utils/Enums.js      |   32 -
 .../src/main/webapp/scripts/utils/Globals.js    |   91 -
 .../main/webapp/scripts/utils/LangSupport.js    |  151 -
 .../src/main/webapp/scripts/utils/Tour.js       |  461 -
 .../src/main/webapp/scripts/utils/Utils.js      | 1241 ---
 .../src/main/webapp/scripts/utils/ViewUtils.js  |  266 -
 .../webapp/scripts/utils/XATemplateHelpers.js   |  170 -
 .../scripts/views/audit/AuditAggregatedView.js  |  416 -
 .../scripts/views/audit/AuditTabLayoutView.js   |  429 -
 .../scripts/views/common/AdvanceSearchLayout.js |  146 -
 .../webapp/scripts/views/common/BreadCrumbs.js  |   83 -
 .../scripts/views/common/CustomBackgrid.js      |  284 -
 .../scripts/views/common/DatePickerLayout.js    |  228 -
 .../webapp/scripts/views/common/ErrorView.js    |  100 -
 .../scripts/views/common/EventHistoryLayout.js  |  486 -
 .../main/webapp/scripts/views/common/Footer.js  |   80 -
 .../main/webapp/scripts/views/common/Header.js  |  481 -
 .../webapp/scripts/views/common/JBDialog.js     |  211 -
 .../scripts/views/common/LogSnapShotLayout.js   |  141 -
 .../webapp/scripts/views/common/ProfileBar.js   |  105 -
 .../main/webapp/scripts/views/common/Spinner.js |   85 -
 .../webapp/scripts/views/common/TableLayout.js  |  405 -
 .../webapp/scripts/views/common/TimerView.js    |  126 -
 .../main/webapp/scripts/views/common/TopNav.js  |   93 -
 .../dashboard/BubbleGraphTableLayoutView.js     |  716 --
 .../views/dashboard/ComponentListView.js        |  319 -
 .../scripts/views/dashboard/ComponentsView.js   |   98 -
 .../scripts/views/dashboard/DashboardView.js    |  184 -
 .../views/dashboard/GridTableLayoutView.js      |  219 -
 .../scripts/views/dashboard/HostListView.js     |  356 -
 .../webapp/scripts/views/dashboard/HostsView.js |   98 -
 .../scripts/views/dashboard/LogDetailView.js    |  249 -
 .../scripts/views/dashboard/LogLevelBoxView.js  |  155 -
 .../scripts/views/dashboard/LogLevelView.js     |  118 -
 .../scripts/views/dashboard/MainLayoutView.js   |  670 --
 .../views/dialog/ApplySearchFilterView.js       |  214 -
 .../scripts/views/dialog/DetailLogFileView.js   |  167 -
 .../dialog/GlobalExclusionCompositeView.js      |  141 -
 .../views/dialog/GlobalExclusionItemView.js     |   84 -
 .../views/dialog/GridGraphSettingView.js        |  476 -
 .../views/dialog/SaveSearchFilterView.js        |  120 -
 .../scripts/views/dialog/TimeZoneChangeView.js  |  101 -
 .../views/filter/CreateLogfeederFilterView.js   |  383 -
 .../scripts/views/graphs/GraphLayoutView.js     |  559 --
 .../scripts/views/graphs/GridGraphLayoutView.js |  594 --
 .../scripts/views/tabs/ComparisonLayoutView.js  |   93 -
 .../webapp/scripts/views/tabs/ComparisonView.js |  242 -
 .../views/tabs/EventHistoryLayoutView.js        |  506 -
 .../scripts/views/tabs/ExportLogFileView.js     |   50 -
 .../views/tabs/HierarchyTabLayoutView.js        |  429 -
 .../scripts/views/tabs/HostInfoTabLayoutView.js |  178 -
 .../webapp/scripts/views/tabs/LogFileView.js    |  864 --
 .../main/webapp/scripts/views/tabs/TreeView.js  |  365 -
 .../scripts/views/tabs/VisualSearchView.js      |  202 -
 .../troubleshoot/TroubleShootLayoutView.js      |  553 --
 .../src/main/webapp/static/schema_fields.json   |   90 -
 .../src/main/webapp/styles/animate.css          | 3272 -------
 .../src/main/webapp/styles/app-font.css         |   64 -
 .../src/main/webapp/styles/bootstrap.css        | 6422 -------------
 .../src/main/webapp/styles/bootstrap.min.css    | 6422 -------------
 .../main/webapp/styles/custom/visualsearch.css  |  369 -
 .../src/main/webapp/styles/style.css            | 2964 ------
 .../src/main/webapp/styles/style_v2.css         | 2596 ------
 .../audit/AuditAggregatedView_tmpl.html         |   54 -
 .../audit/AuditTabLayoutView_tmpl.html          |  100 -
 .../common/AdvanceSearchLayout_tmpl.html        |   30 -
 .../templates/common/DatePickerLayout_tmpl.html |   29 -
 .../webapp/templates/common/ErrorView_tmpl.html |   28 -
 .../common/EventHistoryItemView_tmpl.html       |   32 -
 .../common/EventHistoryLayout_tmpl.html         |   44 -
 .../webapp/templates/common/Footer_tmpl.html    |   42 -
 .../webapp/templates/common/Header_tmpl.html    |   98 -
 .../common/LogSnapShotLayout_tmpl.html          |   23 -
 .../templates/common/TableLayout_tmpl.html      |   37 -
 .../webapp/templates/common/TimerView_tmpl.html |   25 -
 .../webapp/templates/common/TopNav_tmpl.html    |   52 -
 .../webapp/templates/common/breadcrumbs.html    |   46 -
 .../webapp/templates/common/formInputItem.html  |   54 -
 .../templates/common/formInputItemList.html     |   42 -
 .../webapp/templates/common/loading_tmpl.html   |   45 -
 .../BubbleGraphTableLayoutView_tmpl.html        |   79 -
 .../dashboard/ComponentListView_tmpl.html       |   35 -
 .../dashboard/ComponentsView_tmpl.html          |   57 -
 .../templates/dashboard/DashboardView_tmpl.html |  122 -
 .../dashboard/GridTableLayoutView_tmpl.html     |   28 -
 .../templates/dashboard/HostListView_tmpl.html  |   48 -
 .../templates/dashboard/HostsView_tmpl.html     |   56 -
 .../templates/dashboard/LogDetailView_tmpl.html |   75 -
 .../dashboard/LogLevelBoxView_tmpl.html         |   49 -
 .../templates/dashboard/LogLevelView_tmpl.html  |  110 -
 .../dashboard/MainLayoutView_tmpl.html          |  111 -
 .../dialog/ApplySearchFilterView_tmpl.html      |   30 -
 .../dialog/DetailLogFileView_tmpl.html          |   22 -
 .../GlobalExclusionCompositeView_tmpl.html      |   32 -
 .../dialog/GlobalExclusionItemView_tmpl.html    |   26 -
 .../dialog/GridGraphSettingView_tmpl.html       |  284 -
 .../dialog/SaveSearchFilterView_tmpl.html       |   70 -
 .../dialog/TimeZoneChangeView_tmpl.html         |  475 -
 .../filter/CreateLogfeederFilter_tmpl.html      |   53 -
 .../templates/graphs/GraphLayoutView_tmpl.html  |   50 -
 .../graphs/GridGraphLayoutView_tmpl.html        |   28 -
 .../src/main/webapp/templates/graphs/backup.js  |  586 --
 .../main/webapp/templates/helpers/Helpers.js    |  520 --
 .../tabs/ComparisonLayoutView_tmpl.html         |   20 -
 .../templates/tabs/ComparisonView_tmpl.html     |   56 -
 .../tabs/EventHistoryLayoutView_tmpl.html       |   45 -
 .../templates/tabs/ExportLogFileView_tmpl.html  |   46 -
 .../tabs/HierarchyTabLayoutView_tmpl.html       |  124 -
 .../tabs/HostInfoTabLayoutView_tmpl.html        |   46 -
 .../webapp/templates/tabs/LogFileView_tmpl.html |  122 -
 .../webapp/templates/tabs/TreeView_tmpl.html    |   54 -
 .../templates/tabs/VisualSearchView_tmpl.html   |   20 -
 .../TroubleShootLayoutView_tmpl.html            |  150 -
 .../main/webapp/themejs/1.3.0/bootstrap.min.js  |   19 -
 .../ambari-logsearch-web/src/polyfills.ts       |   86 +
 .../ambari-logsearch-web/src/styles.less        |   17 +
 .../ambari-logsearch-web/src/test.ts            |   47 +
 .../ambari-logsearch-web/src/tsconfig.app.json  |   13 +
 .../ambari-logsearch-web/src/tsconfig.spec.json |   20 +
 .../ambari-logsearch-web/src/typings.d.ts       |   23 +
 .../vendor/css/bootstrap-datetimepicker.min.css |    5 +
 .../src/vendor/css/bootstrap-logsearch.min.css  |   18 +
 .../vendor/css/fonts/Roboto-Regular-webfont.eot |  Bin 0 -> 79547 bytes
 .../vendor/css/fonts/Roboto-Regular-webfont.svg | 7606 +++++++++++++++
 .../vendor/css/fonts/Roboto-Regular-webfont.ttf |  Bin 0 -> 234464 bytes
 .../css/fonts/Roboto-Regular-webfont.woff       |  Bin 0 -> 105700 bytes
 .../src/vendor/js/WorldMapGenerator.min.js      |   11 +
 .../vendor/js/bootstrap-datetimepicker.min.js   |    2 +
 .../src/vendor/js/bootstrap-logsearch.min.js    |   19 +
 .../ambari-logsearch-web/tsconfig.json          |   31 +
 .../ambari-logsearch-web/tslint.json            |  116 +
 .../ambari-logsearch-web/webpack.config.js      |  540 ++
 ambari-logsearch/ambari-logsearch-web/yarn.lock | 6775 ++++++++++++++
 ambari-logsearch/docker/bin/start.sh            |    1 -
 ambari-logsearch/docker/logsearch-docker.sh     |   26 +-
 .../logfeeder/shipper-conf/output.config.json   |   10 +-
 .../docker/test-config/logsearch/log4j.xml      |   35 +-
 .../test-config/logsearch/logsearch-env.sh      |    5 +-
 .../logsearch/logsearch-https.properties        |    1 +
 .../docker/test-config/solr/solr-env-ssl.sh     |    1 +
 .../docker/test-config/solr/solr-env.sh         |    1 +
 ambari-logsearch/pom.xml                        |    7 +-
 ambari-metrics/ambari-metrics-assembly/pom.xml  |    2 +
 .../ambari-metrics/datasource.js                |   35 +-
 .../conf/unix/ambari-metrics-monitor            |   15 +-
 .../conf/unix/metric_monitor.ini                |    2 +
 .../src/main/python/core/config_reader.py       |    8 +
 .../src/main/python/core/emitter.py             |   58 +-
 .../src/main/python/core/krberr.py              |   42 +
 .../main/python/core/spnego_kerberos_auth.py    |  164 +
 .../src/test/python/core/TestEmitter.py         |   24 +
 .../conf/unix/metrics_whitelist                 |  159 +-
 .../conf/windows/metrics_whitelist              |  159 +-
 .../timeline/HBaseTimelineMetricStore.java      |    8 +-
 .../timeline/TimelineMetricConfiguration.java   |   13 +
 .../metrics/timeline/TimelineMetricsFilter.java |   24 +-
 .../MetricCollectorHAController.java            |   42 +-
 .../timeline/HBaseTimelineMetricStoreTest.java  |   36 +-
 .../timeline/TimelineMetricsFilterTest.java     |    3 +
 ambari-metrics/pom.xml                          |   27 +
 ambari-project/pom.xml                          |    1 +
 ambari-server/checkstyle.xml                    |    4 +-
 ambari-server/pom.xml                           |   12 +-
 .../persist/jpa/AmbariJpaPersistModule.java     |    2 +-
 .../actionmanager/ActionDBAccessorImpl.java     |   11 +
 .../server/actionmanager/ActionManager.java     |    2 +-
 .../server/actionmanager/ActionScheduler.java   |   47 +-
 .../actionmanager/ExecutionCommandWrapper.java  |  130 +-
 .../ambari/server/actionmanager/Stage.java      |   13 +-
 .../apache/ambari/server/agent/ActionQueue.java |    2 +-
 .../ambari/server/agent/AgentRequests.java      |    2 +-
 .../ambari/server/agent/CommandRepository.java  |   71 +-
 .../ambari/server/agent/ExecutionCommand.java   |   35 +-
 .../ambari/server/agent/HeartBeatHandler.java   |   12 +-
 .../ambari/server/agent/HeartbeatProcessor.java |   34 +-
 .../ambari/server/agent/StatusCommand.java      |   16 +
 .../org/apache/ambari/server/agent/hello.rs     |    0
 .../server/alerts/StaleAlertRunnable.java       |    2 +-
 .../ambari/server/api/predicate/QueryLexer.java |    2 +-
 .../server/api/predicate/QueryParser.java       |    4 +-
 .../expressions/LogicalExpression.java          |    2 +-
 .../api/predicate/operators/InOperator.java     |    2 +-
 .../ambari/server/api/query/QueryImpl.java      |    4 +-
 .../query/render/ClusterBlueprintRenderer.java  |   16 +-
 .../resources/ClusterResourceDefinition.java    |    3 +
 .../RequestScheduleResourceDefinition.java      |    2 +-
 .../resources/ResourceInstanceFactoryImpl.java  |    2 +-
 .../api/resources/SubResourceDefinition.java    |    2 +-
 .../ViewInstanceResourceDefinition.java         |    2 +-
 .../server/api/services/AmbariMetaInfo.java     |  131 +-
 .../server/api/services/ViewUrlsService.java    |    2 +-
 .../api/services/WidgetLayoutService.java       |    1 -
 .../services/parsers/JsonRequestBodyParser.java |    3 +-
 .../persistence/PersistenceManagerImpl.java     |    2 +-
 .../services/serializers/JsonSerializer.java    |    5 +-
 .../stackadvisor/StackAdvisorRequest.java       |    7 +-
 .../stackadvisor/StackAdvisorRunner.java        |    2 +-
 .../ambari/server/audit/AsyncAuditLogger.java   |    2 +-
 .../eventcreator/PrivilegeEventCreator.java     |    2 +-
 .../RepositoryVersionEventCreator.java          |    2 +-
 .../eventcreator/ViewPrivilegeEventCreator.java |    2 +-
 .../ambari/server/bootstrap/BootStrapImpl.java  |    2 +-
 .../DistributeRepositoriesStructuredOutput.java |   70 -
 .../server/checks/AbstractCheckDescriptor.java  |  252 +-
 .../server/checks/AtlasPresenceCheck.java       |   13 +-
 .../server/checks/AutoStartDisabledCheck.java   |   11 +
 .../ambari/server/checks/CheckDescription.java  |   65 +-
 .../server/checks/ClientRetryPropertyCheck.java |    8 +-
 .../checks/ComponentsExistInRepoCheck.java      |  140 +
 .../checks/ComponentsInstallationCheck.java     |    5 +-
 .../server/checks/ConfigurationMergeCheck.java  |    3 +-
 .../checks/DatabaseConsistencyCheckHelper.java  |  203 +-
 .../checks/DruidHighAvailabilityCheck.java      |  121 +
 .../ambari/server/checks/HealthCheck.java       |    3 +-
 .../HiveDynamicServiceDiscoveryCheck.java       |   15 +-
 .../checks/HiveMultipleMetastoreCheck.java      |    7 +-
 .../server/checks/HiveNotRollingWarning.java    |   11 +-
 .../checks/HostsMasterMaintenanceCheck.java     |   17 +-
 .../checks/HostsRepositoryVersionCheck.java     |   57 +-
 .../server/checks/InstallPackagesCheck.java     |   18 +-
 .../server/checks/KafkaKerberosCheck.java       |   10 +-
 ...apReduce2JobHistoryStatePreservingCheck.java |    7 +-
 .../server/checks/RangerAuditDbCheck.java       |   10 +-
 .../server/checks/RangerPasswordCheck.java      |   29 +-
 .../server/checks/RangerSSLConfigCheck.java     |    7 +-
 .../checks/SecondaryNamenodeDeletedCheck.java   |   24 +-
 .../checks/ServiceCheckValidityCheck.java       |   84 +-
 .../server/checks/ServicePresenceCheck.java     |    4 +-
 .../checks/ServicesMaintenanceModeCheck.java    |   11 +-
 .../ServicesMapReduceDistributedCacheCheck.java |   27 +-
 .../ServicesNamenodeHighAvailabilityCheck.java  |   11 +-
 .../checks/ServicesNamenodeTruncateCheck.java   |   26 +-
 .../ServicesTezDistributedCacheCheck.java       |   25 +-
 .../ambari/server/checks/ServicesUpCheck.java   |   11 +-
 .../checks/ServicesYarnWorkPreservingCheck.java |    7 +-
 .../server/checks/StormShutdownWarning.java     |   11 +-
 .../checks/YarnRMHighAvailabilityCheck.java     |    8 +-
 .../YarnTimelineServerStatePreservingCheck.java |   99 +-
 .../collections/functors/ContainsPredicate.java |    2 +-
 .../DelegatedMultiplePredicateContainer.java    |    2 +-
 .../DelegatedSinglePredicateContainer.java      |    2 +-
 .../collections/functors/EqualsPredicate.java   |    2 +-
 .../server/configuration/Configuration.java     |    6 +-
 .../controller/AmbariActionExecutionHelper.java |    4 +-
 .../AmbariCustomCommandExecutionHelper.java     |   86 +-
 .../AmbariManagementControllerImpl.java         |  198 +-
 .../server/controller/AuthToLocalBuilder.java   |  328 +-
 .../server/controller/ClusterRequest.java       |   16 -
 .../server/controller/ConfigGroupRequest.java   |   14 +-
 .../server/controller/ConfigGroupResponse.java  |    2 +-
 .../server/controller/ControllerModule.java     |    5 +-
 .../controller/DeleteIdentityHandler.java       |    7 +-
 .../ambari/server/controller/HostResponse.java  |    8 +-
 .../server/controller/KerberosHelper.java       |  195 +-
 .../server/controller/KerberosHelperImpl.java   | 1191 ++-
 .../server/controller/LdapSyncRequest.java      |    2 +-
 .../server/controller/PrereqCheckRequest.java   |   57 +-
 .../server/controller/RepositoryResponse.java   |   23 +-
 .../controller/ResourceProviderFactory.java     |    6 +-
 .../server/controller/ServiceRequest.java       |   24 +-
 .../server/controller/ServiceResponse.java      |   18 +-
 .../StackServiceComponentResponse.java          |   16 +
 .../AbstractAuthorizedResourceProvider.java     |    2 +-
 .../AbstractControllerResourceProvider.java     |    2 +-
 .../internal/AbstractProviderModule.java        |  116 +-
 .../BlueprintConfigurationProcessor.java        |   41 +-
 .../internal/BlueprintResourceProvider.java     |    4 +-
 .../internal/ClientConfigResourceProvider.java  |    2 +
 .../internal/ClusterControllerImpl.java         |    6 +-
 ...usterKerberosDescriptorResourceProvider.java |    3 +-
 .../internal/ClusterResourceProvider.java       |   10 +-
 .../ClusterStackVersionResourceProvider.java    |  148 +-
 .../internal/ComponentResourceProvider.java     |   18 +-
 .../internal/ConfigGroupResourceProvider.java   |   16 +-
 .../internal/ConfigurationResourceProvider.java |    2 +-
 .../internal/DefaultProviderModule.java         |    2 -
 .../internal/ExportBlueprintRequest.java        |    4 +-
 .../server/controller/internal/Extension.java   |    2 +-
 .../internal/ExtensionLinkResourceProvider.java |    4 +-
 .../internal/ExtensionResourceProvider.java     |    2 +-
 .../ExtensionVersionResourceProvider.java       |    2 +-
 .../internal/HostComponentResourceProvider.java |    8 +-
 .../internal/HostResourceProvider.java          |   84 +-
 .../HostStackVersionResourceProvider.java       |   32 +-
 .../internal/HttpPropertyProvider.java          |   27 +-
 .../KerberosDescriptorResourceProvider.java     |    2 +-
 .../OperatingSystemResourceProvider.java        |    2 +-
 .../PreUpgradeCheckResourceProvider.java        |   85 +-
 .../internal/PrivilegeResourceProvider.java     |    2 +-
 .../internal/ProvisionClusterRequest.java       |   18 +
 .../internal/ReadOnlyResourceProvider.java      |    2 +-
 .../internal/RemoteClusterResourceProvider.java |    2 +-
 .../internal/RepositoryResourceProvider.java    |    8 +-
 .../RepositoryVersionResourceProvider.java      |   20 +-
 .../server/controller/internal/RequestImpl.java |    6 +-
 .../internal/RequestResourceProvider.java       |   12 +-
 .../internal/RequestStageContainer.java         |    2 +-
 .../controller/internal/RequestStatusImpl.java  |    2 +-
 .../controller/internal/ResourceImpl.java       |    2 +-
 .../RootServiceComponentResourceProvider.java   |    2 +-
 ...ootServiceHostComponentResourceProvider.java |    2 +-
 .../internal/RootServiceResourceProvider.java   |    2 +-
 .../internal/ScaleClusterRequest.java           |    8 +-
 .../internal/ServiceResourceProvider.java       |  111 +-
 .../server/controller/internal/Stack.java       |   10 +-
 .../internal/StackAdvisorResourceProvider.java  |   24 +-
 ...ConfigurationDependencyResourceProvider.java |    2 +-
 .../StackConfigurationResourceProvider.java     |    2 +-
 ...StackLevelConfigurationResourceProvider.java |    2 +-
 .../internal/StackResourceProvider.java         |    2 +-
 .../StackServiceComponentResourceProvider.java  |    9 +-
 .../internal/StackServiceResourceProvider.java  |    2 +-
 .../internal/StackVersionResourceProvider.java  |   68 +-
 .../internal/UpgradeResourceProvider.java       |  166 +-
 .../VersionDefinitionResourceProvider.java      |   71 +-
 .../internal/ViewInstanceResourceProvider.java  |    2 +-
 .../internal/ViewPrivilegeResourceProvider.java |    2 +-
 .../internal/ViewResourceProvider.java          |    2 +-
 .../internal/ViewVersionResourceProvider.java   |    2 +-
 .../internal/WidgetLayoutResourceProvider.java  |    2 +-
 .../LoggingRequestHelperFactoryImpl.java        |   19 +-
 .../GangliaComponentPropertyProvider.java       |    2 +-
 .../GangliaHostComponentPropertyProvider.java   |    2 +-
 .../metrics/timeline/AMSPropertyProvider.java   |    2 +-
 .../metrics/timeline/MetricsRequestHelper.java  |    4 +-
 .../cache/TimelineMetricCacheEntryFactory.java  |    2 +-
 .../controller/utilities/PropertyHelper.java    |    2 +-
 .../utilities/RemovableIdentities.java          |    4 +-
 .../utilities/ScalingThreadPoolExecutor.java    |    2 +-
 .../controller/utilities/UsedIdentities.java    |    2 +-
 .../customactions/ActionDefinitionManager.java  |    4 +-
 .../events/ClusterConfigFinishedEvent.java      |   15 +-
 .../DistributeRepositoriesActionListener.java   |  108 +-
 .../upgrade/HostVersionOutOfSyncListener.java   |    2 +-
 .../upgrade/StackUpgradeFinishListener.java     |   13 +
 .../listeners/upgrade/StackVersionListener.java |   41 +-
 .../users/PostUserCreationHookContext.java      |    2 +-
 .../ambari/server/logging/LockFactory.java      |    2 +-
 .../CachedRoleCommandOrderProvider.java         |   12 +-
 .../server/metadata/RoleCommandOrder.java       |    2 +-
 .../dispatchers/AlertScriptDispatcher.java      |    2 +-
 .../apache/ambari/server/orm/DBAccessor.java    |   65 +
 .../ambari/server/orm/DBAccessorImpl.java       |  132 +-
 .../server/orm/dao/AlertDefinitionDAO.java      |   13 +
 .../ambari/server/orm/dao/ClusterDAO.java       |   22 +-
 .../server/orm/dao/HostConfigMappingDAO.java    |    2 +-
 .../server/orm/dao/HostRoleCommandDAO.java      |   49 +-
 .../server/orm/dao/RepositoryVersionDAO.java    |   35 +-
 .../ambari/server/orm/dao/UpgradeDAO.java       |   46 +-
 .../orm/entities/AlertDefinitionEntity.java     |    4 +-
 .../server/orm/entities/ArtifactEntity.java     |    2 +-
 .../orm/entities/ClusterConfigEntity.java       |   15 +-
 .../HostComponentDesiredStateEntity.java        |   14 -
 .../orm/entities/HostComponentStateEntity.java  |   13 -
 .../orm/entities/HostRoleCommandEntity.java     |   64 +-
 .../server/orm/entities/RepositoryEntity.java   |   22 +
 .../orm/entities/RepositoryVersionEntity.java   |   98 +-
 .../orm/entities/ServiceDesiredStateEntity.java |   13 -
 .../server/orm/entities/UpgradeEntity.java      |  131 +-
 .../server/orm/entities/UpgradeItemEntity.java  |    2 +-
 .../server/orm/helpers/dbms/DbmsHelper.java     |   27 +
 .../orm/helpers/dbms/GenericDbmsHelper.java     |   14 +
 .../server/orm/helpers/dbms/H2Helper.java       |   16 +
 .../server/orm/helpers/dbms/MySqlHelper.java    |   16 +
 .../server/orm/helpers/dbms/OracleHelper.java   |   16 +
 .../server/orm/helpers/dbms/PostgresHelper.java |   15 +
 .../scheduler/ExecutionScheduleManager.java     |   23 +-
 .../AmbariDelegatingAuthenticationFilter.java   |    2 +-
 .../pam/PamAuthenticationFactory.java           |   35 +
 .../AmbariPamAuthenticationProvider.java        |    3 +
 .../authorization/PermissionHelper.java         |    2 +-
 .../server/security/authorization/Users.java    |   56 +-
 .../serveraction/AbstractServerAction.java      |    2 +-
 .../AbstractPrepareKerberosServerAction.java    |   16 +-
 .../kerberos/CreateKeytabFilesServerAction.java |    9 +-
 .../kerberos/CreatePrincipalsServerAction.java  |    3 +-
 .../kerberos/DeconstructedPrincipal.java        |    2 +-
 .../kerberos/FinalizeKerberosServerAction.java  |   39 -
 .../kerberos/KerberosIdentityDataFile.java      |    2 +-
 .../KerberosIdentityDataFileWriter.java         |    9 +-
 .../kerberos/KerberosOperationHandler.java      |    6 +-
 .../kerberos/KerberosServerAction.java          |    6 +
 .../kerberos/PreconfigureServiceType.java       |   46 +
 .../PrepareDisableKerberosServerAction.java     |   45 +-
 .../PrepareEnableKerberosServerAction.java      |   43 +-
 .../PrepareKerberosIdentitiesServerAction.java  |   54 +-
 .../upgrades/AbstractUpgradeServerAction.java   |    7 +
 .../upgrades/AutoSkipFailedSummaryAction.java   |    2 +-
 .../upgrades/FinalizeUpgradeAction.java         |   79 +-
 .../upgrades/PreconfigureKerberosAction.java    |  573 ++
 .../RangerUsersyncConfigCalculation.java        |   96 +
 .../upgrades/UpdateDesiredRepositoryAction.java |  189 +
 .../upgrades/UpdateDesiredStackAction.java      |  189 -
 .../upgrades/UpgradeUserKerberosDescriptor.java |    4 +-
 .../server/stack/ConfigurationDirectory.java    |    5 +-
 .../ambari/server/stack/ExtensionDirectory.java |    3 +-
 .../apache/ambari/server/stack/RepoUtil.java    |    4 +-
 .../ambari/server/stack/ServiceDirectory.java   |   14 +-
 .../ambari/server/stack/ServiceModule.java      |   15 +-
 .../ambari/server/stack/StackContext.java       |  118 +-
 .../ambari/server/stack/StackDirectory.java     |   92 +-
 .../ambari/server/stack/StackManager.java       |   21 +-
 .../apache/ambari/server/stack/StackModule.java |   64 +-
 .../server/stack/StackServiceDirectory.java     |    7 +-
 .../ambari/server/stageplanner/RoleGraph.java   |    2 +-
 .../apache/ambari/server/state/CheckHelper.java |    4 +-
 .../org/apache/ambari/server/state/Cluster.java |    5 +
 .../ambari/server/state/ComponentInfo.java      |   15 +
 .../ambari/server/state/ConfigHelper.java       |   64 +-
 .../apache/ambari/server/state/ConfigImpl.java  |    2 +-
 .../ambari/server/state/ConfigMergeHelper.java  |    6 +-
 .../ambari/server/state/RepositoryInfo.java     |   27 +-
 .../ambari/server/state/RepositoryType.java     |   39 +-
 .../ambari/server/state/SecurityState.java      |   83 -
 .../org/apache/ambari/server/state/Service.java |   20 +-
 .../server/state/ServiceComponentHost.java      |   39 -
 .../server/state/ServiceComponentImpl.java      |   25 +
 .../apache/ambari/server/state/ServiceImpl.java |   35 +-
 .../apache/ambari/server/state/ServiceInfo.java |   17 +-
 .../ambari/server/state/ServiceOsSpecific.java  |   16 +
 .../apache/ambari/server/state/StackInfo.java   |   48 +-
 .../ambari/server/state/UpgradeContext.java     |  434 +-
 .../ambari/server/state/UpgradeHelper.java      |  269 +-
 .../state/alert/AggregateDefinitionMapping.java |    2 +-
 .../ambari/server/state/alert/AlertUri.java     |   15 +
 .../server/state/cluster/ClusterImpl.java       |   77 +-
 .../server/state/cluster/ClustersImpl.java      |   14 +-
 .../state/configgroup/ConfigGroupImpl.java      |   10 +-
 .../ambari/server/state/host/HostImpl.java      |    2 +-
 .../kerberos/AbstractKerberosDescriptor.java    |   46 +-
 .../AbstractKerberosDescriptorContainer.java    |   37 +-
 .../kerberos/KerberosComponentDescriptor.java   |    2 +-
 .../state/kerberos/KerberosDescriptor.java      |   56 +-
 .../kerberos/KerberosIdentityDescriptor.java    |   78 +-
 .../kerberos/KerberosKeytabDescriptor.java      |   42 +-
 .../kerberos/KerberosPrincipalDescriptor.java   |   25 +-
 .../kerberos/KerberosServiceDescriptor.java     |   94 +-
 .../ambari/server/state/quicklinks/Port.java    |   15 +
 .../DefaultQuickLinkVisibilityController.java   |    6 +-
 .../state/repository/AvailableVersion.java      |   21 +-
 .../state/repository/ClusterVersionSummary.java |   56 +
 .../state/repository/ManifestService.java       |   10 +-
 .../ambari/server/state/repository/Release.java |    2 +-
 .../state/repository/ServiceVersionSummary.java |   79 +
 .../state/repository/VersionDefinitionXml.java  |  161 +-
 .../services/AlertNoticeDispatchService.java    |    2 +-
 .../services/AmbariServerAlertService.java      |   21 +-
 .../server/state/stack/ConfigUpgradePack.java   |    2 +-
 .../server/state/stack/LatestRepoCallable.java  |   80 +-
 .../ambari/server/state/stack/OsFamily.java     |    2 +-
 .../server/state/stack/RepoUrlInfoCallable.java |  217 +
 .../server/state/stack/RepoVdfCallable.java     |  161 +
 .../server/state/stack/RepositoryXml.java       |   11 +
 .../state/stack/upgrade/ClusterGrouping.java    |    8 +-
 .../state/stack/upgrade/ColocatedGrouping.java  |    6 +-
 .../state/stack/upgrade/ConfigureTask.java      |    3 +
 .../server/state/stack/upgrade/Grouping.java    |   17 +-
 .../state/stack/upgrade/HostOrderGrouping.java  |    4 +-
 .../stack/upgrade/RepositoryVersionHelper.java  |   44 +-
 .../stack/upgrade/ServiceCheckGrouping.java     |   60 +-
 .../state/stack/upgrade/StageWrapper.java       |    2 +-
 .../stack/upgrade/StageWrapperBuilder.java      |    2 +-
 .../server/state/stack/upgrade/TaskWrapper.java |    2 +-
 .../svccomphost/ServiceComponentHostImpl.java   |   60 -
 .../ambari/server/topology/AmbariContext.java   |  129 +-
 .../server/topology/AsyncCallableService.java   |  110 +-
 .../ambari/server/topology/BlueprintImpl.java   |    2 +-
 .../topology/ClusterConfigurationRequest.java   |    6 +-
 .../ambari/server/topology/Configuration.java   |    4 +-
 .../ambari/server/topology/HostGroupImpl.java   |    2 +-
 .../ambari/server/topology/HostRequest.java     |    8 +-
 .../ambari/server/topology/LogicalRequest.java  |    2 +-
 .../server/topology/PersistedStateImpl.java     |   25 +-
 .../ambari/server/topology/TopologyManager.java |  131 +-
 .../topology/tasks/ConfigureClusterTask.java    |  125 +-
 .../tasks/ConfigureClusterTaskFactory.java      |    3 +-
 .../server/topology/tasks/InstallHostTask.java  |   24 +-
 .../tasks/PersistHostResourcesTask.java         |    3 +
 .../tasks/RegisterWithConfigGroupTask.java      |    3 +
 .../server/topology/tasks/StartHostTask.java    |   23 +-
 .../RequiredConfigPropertiesValidator.java      |    2 +-
 .../server/upgrade/AbstractUpgradeCatalog.java  |   12 +-
 .../server/upgrade/FinalUpgradeCatalog.java     |    4 +-
 .../server/upgrade/SchemaUpgradeHelper.java     |    1 +
 .../server/upgrade/UpgradeCatalog252.java       |  283 +
 .../server/upgrade/UpgradeCatalog260.java       |  827 ++
 .../server/upgrade/UpgradeCatalog300.java       |  193 +-
 .../server/utils/ManagedThreadPoolExecutor.java |   83 +
 .../apache/ambari/server/utils/StageUtils.java  |    2 +-
 .../ambari/server/view/ViewContextImpl.java     |    4 +-
 .../view/ViewDataMigrationContextImpl.java      |    2 +-
 .../server/view/ViewURLStreamProvider.java      |    4 +-
 .../view/configuration/InstanceConfig.java      |    2 +-
 .../view/configuration/PersistenceConfig.java   |    2 +-
 .../view/configuration/ResourceConfig.java      |    2 +-
 .../server/view/configuration/ViewConfig.java   |    8 +-
 .../ambari/server/view/events/EventImpl.java    |    2 +-
 .../server/view/persistence/DataStoreImpl.java  |    2 +-
 .../python/ambari_server/serverConfiguration.py |    7 +-
 .../main/python/ambari_server/serverUpgrade.py  |   12 +-
 .../src/main/python/azuredb_create_generator.py |    2 +-
 ambari-server/src/main/python/setupAgent.py     |    4 +-
 .../main/resources/Ambari-DDL-Derby-CREATE.sql  |   16 +-
 .../main/resources/Ambari-DDL-MySQL-CREATE.sql  |   15 +-
 .../main/resources/Ambari-DDL-Oracle-CREATE.sql |   13 +-
 .../resources/Ambari-DDL-Postgres-CREATE.sql    |   13 +-
 .../resources/Ambari-DDL-SQLAnywhere-CREATE.sql |   13 +-
 .../resources/Ambari-DDL-SQLServer-CREATE.sql   |   20 +-
 .../1.6.1.2.2.0/configuration/accumulo-env.xml  |    6 +
 .../ACCUMULO/1.6.1.2.2.0/kerberos.json          |    6 +-
 .../package/scripts/accumulo_client.py          |    7 +-
 .../package/scripts/accumulo_script.py          |   36 +-
 .../0.1.0/configuration/infra-solr-env.xml      |   22 +-
 .../AMBARI_INFRA/0.1.0/kerberos.json            |    6 +-
 .../0.1.0/package/scripts/params.py             |    4 +
 .../0.1.0/properties/infra-solr-env.sh.j2       |    3 +-
 .../properties/solr-client-log4j.properties.j2  |    4 +
 .../0.1.0/configuration/ams-env.xml             |   10 +
 .../0.1.0/configuration/ams-hbase-env.xml       |   16 +
 .../0.1.0/configuration/ams-hbase-site.xml      |   24 +-
 .../0.1.0/configuration/ams-site.xml            |   68 +-
 .../0.1.0/configuration/ams-ssl-client.xml      |    9 -
 .../AMBARI_METRICS/0.1.0/kerberos.json          |   32 +-
 .../AMBARI_METRICS/0.1.0/metainfo.xml           |   15 +
 .../HDF/grafana-nifi-hosts.json                 |   17 +-
 .../HDP/grafana-hbase-regionservers.json        | 1442 +++
 .../AMBARI_METRICS/0.1.0/package/scripts/ams.py |   42 +-
 .../0.1.0/package/scripts/params.py             |   19 +-
 .../0.1.0/package/scripts/params_linux.py       |    6 +
 .../0.1.0/package/scripts/service_check.py      |  201 +-
 .../package/templates/metric_monitor.ini.j2     |    2 +
 .../AMBARI_METRICS/0.1.0/themes/theme.json      |  288 +
 .../ATLAS/0.1.0.2.3/configuration/atlas-env.xml |    6 +
 .../ATLAS/0.1.0.2.3/kerberos.json               |    6 +-
 .../0.1.0.2.3/package/scripts/atlas_client.py   |   10 +-
 .../ATLAS/0.1.0.2.3/package/scripts/metadata.py |   22 +
 .../package/scripts/metadata_server.py          |    9 +-
 .../ATLAS/0.1.0.2.3/package/scripts/params.py   |    7 +-
 .../ATLAS/0.7.0.2.5/kerberos.json               |    9 +-
 .../ATLAS/0.7.0.3.0/configuration/atlas-env.xml |   12 +
 .../0.7.0.3.0/configuration/atlas-log4j.xml     |    4 +-
 .../ATLAS/0.7.0.3.0/kerberos.json               |    9 +-
 .../0.7.0.3.0/package/scripts/atlas_client.py   |    8 +-
 .../package/scripts/metadata_server.py          |    8 +-
 .../ATLAS/0.7.0.3.0/package/scripts/params.py   |    2 +-
 .../DRUID/0.9.2/configuration/druid-env.xml     |    6 +
 .../0.9.2/configuration/druid-logrotate.xml     |    2 +-
 .../0.9.2/configuration/druid-middlemanager.xml |   18 +
 .../0.9.2/configuration/druid-superset-env.xml  |  118 -
 .../0.9.2/configuration/druid-superset.xml      |  178 -
 .../common-services/DRUID/0.9.2/metainfo.xml    |   21 -
 .../DRUID/0.9.2/package/scripts/druid.py        |   38 +-
 .../DRUID/0.9.2/package/scripts/druid_node.py   |    9 +-
 .../DRUID/0.9.2/package/scripts/params.py       |   56 +-
 .../0.9.2/package/scripts/status_params.py      |    2 -
 .../DRUID/0.9.2/package/scripts/superset.py     |  169 -
 .../DRUID/0.9.2/package/templates/superset.sh   |   95 -
 .../DRUID/0.9.2/quicklinks/quicklinks.json      |   13 -
 .../DRUID/0.9.2/role_command_order.json         |    3 +-
 .../DRUID/0.9.2/themes/theme.json               |   82 +-
 .../0.5.0.2.1/configuration/falcon-env.xml      |    6 +
 .../0.5.0.2.1/configuration/oozie-site.xml      |  208 -
 .../FALCON/0.5.0.2.1/kerberos.json              |   12 +-
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |    4 +-
 .../0.5.0.2.1/package/scripts/falcon_client.py  |    8 +-
 .../0.5.0.2.1/package/scripts/falcon_server.py  |    7 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |   25 +-
 .../FLUME/1.4.0.2.0/configuration/flume-env.xml |    6 +
 .../1.4.0.2.0/package/scripts/flume_handler.py  |    8 +-
 .../common-services/HAWQ/2.0.0/kerberos.json    |    9 +-
 .../0.96.0.2.0/configuration/hbase-env.xml      |   22 +-
 .../HBASE/0.96.0.2.0/kerberos.json              |    9 +-
 .../0.96.0.2.0/package/scripts/hbase_client.py  |   18 +-
 .../0.96.0.2.0/package/scripts/hbase_master.py  |    5 +-
 .../package/scripts/hbase_regionserver.py       |    5 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |    5 +
 .../package/scripts/phoenix_queryserver.py      |   11 +-
 .../HBASE/0.96.0.2.0/package/scripts/upgrade.py |    7 +-
 .../HBASE/2.0.0.3.0/configuration/hbase-env.xml |   10 +-
 .../HBASE/2.0.0.3.0/kerberos.json               |   15 +-
 .../2.0.0.3.0/package/scripts/hbase_client.py   |   16 +-
 .../2.0.0.3.0/package/scripts/hbase_master.py   |    5 +-
 .../package/scripts/hbase_regionserver.py       |    5 +-
 .../package/scripts/phoenix_queryserver.py      |    8 +-
 .../HBASE/2.0.0.3.0/package/scripts/upgrade.py  |    7 +-
 .../HDFS/2.1.0.2.0/configuration/hadoop-env.xml |   10 +
 .../HDFS/2.1.0.2.0/configuration/hdfs-site.xml  |    1 +
 .../HDFS/2.1.0.2.0/kerberos.json                |   18 +-
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |   11 +-
 .../2.1.0.2.0/package/scripts/hdfs_client.py    |    8 +-
 .../2.1.0.2.0/package/scripts/hdfs_datanode.py  |    2 +-
 .../2.1.0.2.0/package/scripts/hdfs_snamenode.py |    6 +
 .../2.1.0.2.0/package/scripts/journalnode.py    |    8 +-
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |   14 +-
 .../2.1.0.2.0/package/scripts/nfsgateway.py     |    8 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |    7 +-
 .../2.1.0.2.0/package/scripts/params_windows.py |    2 +-
 .../HDFS/2.1.0.2.0/package/scripts/snamenode.py |    8 +-
 .../HDFS/2.1.0.2.0/package/scripts/utils.py     |   17 +
 .../2.1.0.2.0/package/scripts/zkfc_slave.py     |   20 +-
 .../common-services/HDFS/2.1.0.2.0/widgets.json |    4 +-
 .../HDFS/3.0.0.3.0/configuration/hadoop-env.xml |   10 +
 .../HDFS/3.0.0.3.0/configuration/hdfs-site.xml  |    1 +
 .../HDFS/3.0.0.3.0/kerberos.json                |   21 +-
 .../HDFS/3.0.0.3.0/package/scripts/datanode.py  |   11 +-
 .../3.0.0.3.0/package/scripts/hdfs_client.py    |    8 +-
 .../3.0.0.3.0/package/scripts/hdfs_snamenode.py |    6 +
 .../3.0.0.3.0/package/scripts/journalnode.py    |    8 +-
 .../HDFS/3.0.0.3.0/package/scripts/namenode.py  |   11 +-
 .../3.0.0.3.0/package/scripts/nfsgateway.py     |    7 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   |    2 +-
 .../3.0.0.3.0/package/scripts/params_windows.py |    2 +-
 .../HDFS/3.0.0.3.0/package/scripts/snamenode.py |    8 +-
 .../HDFS/3.0.0.3.0/package/scripts/utils.py     |   16 +
 .../3.0.0.3.0/package/scripts/zkfc_slave.py     |   12 +-
 .../HIVE/0.12.0.2.0/configuration/hive-env.xml  |   12 +
 .../HIVE/0.12.0.2.0/configuration/hive-site.xml |  218 +
 .../HIVE/0.12.0.2.0/kerberos.json               |   15 +-
 .../HIVE/0.12.0.2.0/metainfo.xml                |    1 +
 .../0.12.0.2.0/package/scripts/hcat_client.py   |    8 +-
 .../0.12.0.2.0/package/scripts/hive_client.py   |    9 +-
 .../package/scripts/hive_metastore.py           |   19 +-
 .../0.12.0.2.0/package/scripts/hive_server.py   |    7 +-
 .../package/scripts/hive_server_interactive.py  |   10 +-
 .../package/scripts/hive_server_upgrade.py      |   21 +-
 .../0.12.0.2.0/package/scripts/hive_service.py  |    6 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |   36 +-
 .../0.12.0.2.0/package/scripts/status_params.py |    8 +-
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |    2 +-
 .../package/scripts/webhcat_server.py           |   10 +-
 .../HIVE/2.1.0.3.0/configuration/hive-env.xml   |   12 +
 .../HIVE/2.1.0.3.0/kerberos.json                |   30 +-
 .../2.1.0.3.0/package/scripts/hcat_client.py    |    9 +-
 .../2.1.0.3.0/package/scripts/hive_client.py    |    9 +-
 .../2.1.0.3.0/package/scripts/hive_metastore.py |   19 +-
 .../2.1.0.3.0/package/scripts/hive_server.py    |    7 +-
 .../package/scripts/hive_server_interactive.py  |    7 +-
 .../package/scripts/hive_server_upgrade.py      |   17 +-
 .../2.1.0.3.0/package/scripts/hive_service.py   |    2 +-
 .../2.1.0.3.0/package/scripts/params_linux.py   |   13 +-
 .../2.1.0.3.0/package/scripts/webhcat_server.py |    9 +-
 .../0.10.0.3.0/configuration/kafka-env.xml      |    6 +
 .../ranger-kafka-plugin-properties.xml          |    6 +
 .../KAFKA/0.10.0.3.0/kerberos.json              |    9 +-
 .../KAFKA/0.10.0.3.0/metainfo.xml               |    1 +
 .../KAFKA/0.10.0.3.0/package/scripts/kafka.py   |   29 -
 .../0.10.0.3.0/package/scripts/kafka_broker.py  |   22 +-
 .../KAFKA/0.10.0.3.0/package/scripts/params.py  |    9 +-
 .../KAFKA/0.10.0.3.0/package/scripts/upgrade.py |    3 -
 .../KAFKA/0.10.0.3.0/service_advisor.py         |    5 +
 .../common-services/KAFKA/0.10.0/kerberos.json  |    9 +-
 .../KAFKA/0.8.1/configuration/kafka-env.xml     |    6 +
 .../common-services/KAFKA/0.8.1/metainfo.xml    |    1 +
 .../KAFKA/0.8.1/package/scripts/kafka.py        |   29 -
 .../KAFKA/0.8.1/package/scripts/kafka_broker.py |   25 +-
 .../KAFKA/0.8.1/package/scripts/params.py       |    8 -
 .../KAFKA/0.8.1/package/scripts/upgrade.py      |    7 +-
 .../ranger-kafka-plugin-properties.xml          |    6 +
 .../common-services/KAFKA/0.9.0/kerberos.json   |    6 +-
 .../1.10.3-10/configuration/kerberos-env.xml    |   27 +
 .../KERBEROS/1.10.3-10/kerberos.json            |    3 +-
 .../KERBEROS/1.10.3-10/metainfo.xml             |    8 +
 .../package/scripts/kerberos_client.py          |    3 +
 .../package/scripts/kerberos_common.py          |   50 +-
 .../1.10.3-30/configuration/kerberos-env.xml    |  423 +
 .../1.10.3-30/configuration/krb5-conf.xml       |   74 +
 .../KERBEROS/1.10.3-30/kerberos.json            |   18 +
 .../KERBEROS/1.10.3-30/metainfo.xml             |  131 +
 .../package/scripts/kerberos_client.py          |   56 +
 .../package/scripts/kerberos_common.py          |  494 +
 .../1.10.3-30/package/scripts/params.py         |  205 +
 .../1.10.3-30/package/scripts/service_check.py  |   85 +
 .../1.10.3-30/package/scripts/status_params.py  |   34 +
 .../KERBEROS/1.10.3-30/package/scripts/utils.py |  105 +
 .../KERBEROS/1.10.3-30/properties/krb5_conf.j2  |   60 +
 .../KNOX/0.5.0.2.2/configuration/knox-env.xml   |   10 +
 .../0.5.0.2.2/package/scripts/knox_gateway.py   |    9 +-
 .../0.5.0.2.2/package/scripts/params_linux.py   |  117 +-
 .../KNOX/0.5.0.3.0/configuration/knox-env.xml   |   10 +
 .../KNOX/0.5.0.3.0/kerberos.json                |    3 +-
 .../0.5.0.3.0/package/scripts/knox_gateway.py   |    9 +-
 .../0.5.0.3.0/package/scripts/params_linux.py   |  113 +-
 .../common-services/LOGSEARCH/0.5.0/alerts.json |    6 +-
 .../0.5.0/configuration/logsearch-env.xml       |   20 +-
 .../configuration/logsearch-properties.xml      |   21 +
 .../LOGSEARCH/0.5.0/kerberos.json               |    6 +-
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |   13 +-
 .../logfeeder-default_grok_patterns.j2          |    2 +-
 .../0.5.0/properties/logsearch-env.sh.j2        |    2 -
 .../0.5.0/properties/output.config.json.j2      |    8 +-
 .../LOGSEARCH/0.5.0/quicklinks/quicklinks.json  |   10 +-
 .../LOGSEARCH/0.5.0/service_advisor.py          |   35 +-
 .../1.0.0.2.3/configuration/mahout-env.xml      |    6 +
 .../MAHOUT/1.0.0.2.3/kerberos.json              |    6 +-
 .../1.0.0.2.3/package/scripts/mahout_client.py  |    9 +-
 .../OOZIE/4.0.0.2.0/configuration/oozie-env.xml |   10 +
 .../OOZIE/4.0.0.2.0/kerberos.json               |   12 +-
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |   56 +-
 .../4.0.0.2.0/package/scripts/oozie_client.py   |    8 +-
 .../4.0.0.2.0/package/scripts/oozie_server.py   |   13 +-
 .../package/scripts/oozie_server_upgrade.py     |   36 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |   22 +-
 .../OOZIE/4.2.0.2.3/kerberos.json               |   12 +-
 .../OOZIE/4.2.0.2.3/quicklinks/quicklinks.json  |    9 +-
 .../OOZIE/4.2.0.3.0/configuration/oozie-env.xml |   10 +
 .../OOZIE/4.2.0.3.0/kerberos.json               |   12 +-
 .../4.2.0.3.0/package/scripts/oozie_client.py   |    8 +-
 .../4.2.0.3.0/package/scripts/oozie_server.py   |   12 +-
 .../OOZIE/4.2.0.3.0/quicklinks/quicklinks.json  |    9 +-
 .../PIG/0.12.0.2.0/kerberos.json                |    3 +-
 .../0.12.0.2.0/package/scripts/pig_client.py    |    9 +-
 .../PIG/0.16.1.3.0/kerberos.json                |    3 +-
 .../0.16.1.3.0/package/scripts/pig_client.py    |    9 +-
 .../RANGER/0.4.0/configuration/ranger-env.xml   |   10 +
 .../RANGER/0.4.0/package/scripts/params.py      |   41 +-
 .../0.4.0/package/scripts/ranger_admin.py       |   13 +-
 .../0.4.0/package/scripts/ranger_tagsync.py     |   13 +-
 .../0.4.0/package/scripts/ranger_usersync.py    |    7 +-
 .../0.4.0/package/scripts/setup_ranger_xml.py   |    6 +
 .../RANGER/0.4.0/package/scripts/upgrade.py     |   31 -
 .../RANGER/0.6.0/configuration/admin-log4j.xml  |    1 -
 .../0.6.0/configuration/tagsync-log4j.xml       |    3 +-
 .../0.6.0/configuration/usersync-log4j.xml      |    3 +-
 .../common-services/RANGER/0.6.0/kerberos.json  |   18 +-
 .../RANGER/0.7.0/configuration/ranger-env.xml   |   23 +
 .../0.7.0/configuration/ranger-ugsync-site.xml  |    7 +
 .../RANGER/0.7.0/themes/theme_version_5.json    |   40 +
 .../1.0.0.3.0/configuration/admin-log4j.xml     |    1 -
 .../1.0.0.3.0/configuration/ranger-env.xml      |   33 +
 .../configuration/ranger-ugsync-site.xml        |    7 +
 .../1.0.0.3.0/configuration/tagsync-log4j.xml   |    3 +-
 .../1.0.0.3.0/configuration/usersync-log4j.xml  |    3 +-
 .../RANGER/1.0.0.3.0/kerberos.json              |   18 +-
 .../RANGER/1.0.0.3.0/package/scripts/params.py  |    3 +-
 .../1.0.0.3.0/package/scripts/ranger_admin.py   |    9 +-
 .../1.0.0.3.0/package/scripts/ranger_tagsync.py |   11 +-
 .../package/scripts/ranger_usersync.py          |    5 +-
 .../RANGER/1.0.0.3.0/package/scripts/upgrade.py |    6 +-
 .../1.0.0.3.0/themes/theme_version_1.json       |   40 +
 .../0.5.0.2.3/configuration/kms-env.xml         |   10 +
 .../0.5.0.2.3/configuration/kms-log4j.xml       |    6 +-
 .../RANGER_KMS/0.5.0.2.3/kerberos.json          |   12 +-
 .../0.5.0.2.3/package/scripts/kms_server.py     |    6 +-
 .../0.5.0.2.3/package/scripts/params.py         |    8 +-
 .../0.5.0.2.3/package/scripts/upgrade.py        |   30 -
 .../1.0.0.3.0/configuration/kms-env.xml         |   10 +
 .../1.0.0.3.0/configuration/kms-log4j.xml       |    6 +-
 .../RANGER_KMS/1.0.0.3.0/kerberos.json          |   15 +-
 .../1.0.0.3.0/package/scripts/kms_server.py     |    5 +-
 .../1.0.0.3.0/package/scripts/params.py         |    3 +
 .../1.0.0.3.0/package/scripts/upgrade.py        |    5 +-
 .../SLIDER/0.60.0.2.2/kerberos.json             |    3 +-
 .../0.60.0.2.2/package/scripts/slider_client.py |   14 +-
 .../SLIDER/0.91.0.3.0/kerberos.json             |    3 +-
 .../0.91.0.3.0/package/scripts/slider_client.py |   14 +-
 .../SPARK/1.2.1/configuration/spark-env.xml     |   10 +
 .../common-services/SPARK/1.2.1/kerberos.json   |    6 +-
 .../scripts/alerts/alert_spark_thrift_port.py   |    2 +-
 .../1.2.1/package/scripts/job_history_server.py |    8 +-
 .../SPARK/1.2.1/package/scripts/livy_server.py  |    8 +-
 .../SPARK/1.2.1/package/scripts/params.py       |    6 +-
 .../1.2.1/package/scripts/service_check.py      |    3 +-
 .../SPARK/1.2.1/package/scripts/spark_client.py |    9 +-
 .../package/scripts/spark_thrift_server.py      |    8 +-
 .../common-services/SPARK/1.4.1/kerberos.json   |   12 +-
 .../SPARK/2.2.0/configuration/livy-env.xml      |   10 +
 .../SPARK/2.2.0/configuration/spark-env.xml     |   10 +
 .../common-services/SPARK/2.2.0/kerberos.json   |    3 +-
 .../2.2.0/package/scripts/job_history_server.py |    9 +-
 .../SPARK/2.2.0/package/scripts/livy_server.py  |    9 +-
 .../SPARK/2.2.0/package/scripts/spark_client.py |    9 +-
 .../package/scripts/spark_thrift_server.py      |    9 +-
 .../SPARK/2.2.0/service_advisor.py              |   48 +-
 .../SPARK2/2.0.0/configuration/spark2-env.xml   |   10 +
 .../common-services/SPARK2/2.0.0/kerberos.json  |   12 +-
 .../scripts/alerts/alert_spark2_thrift_port.py  |    2 +-
 .../2.0.0/package/scripts/job_history_server.py |    8 +-
 .../2.0.0/package/scripts/livy2_server.py       |   10 +-
 .../SPARK2/2.0.0/package/scripts/params.py      |    3 +-
 .../2.0.0/package/scripts/service_check.py      |    3 +-
 .../2.0.0/package/scripts/spark_client.py       |    8 +-
 .../package/scripts/spark_thrift_server.py      |    8 +-
 .../SQOOP/1.4.4.2.0/configuration/sqoop-env.xml |    6 +
 .../1.4.4.2.0/package/scripts/params_linux.py   |    2 +
 .../1.4.4.2.0/package/scripts/service_check.py  |    3 -
 .../1.4.4.2.0/package/scripts/sqoop_client.py   |    9 +-
 .../SQOOP/1.4.4.3.0/configuration/sqoop-env.xml |    6 +
 .../1.4.4.3.0/package/scripts/params_linux.py   |    2 +
 .../1.4.4.3.0/package/scripts/service_check.py  |    3 -
 .../1.4.4.3.0/package/scripts/sqoop_client.py   |    9 +-
 .../STORM/0.9.1/configuration/storm-env.xml     |    6 +
 .../common-services/STORM/0.9.1/kerberos.json   |    9 +-
 .../common-services/STORM/0.9.1/metainfo.xml    |    2 -
 .../STORM/0.9.1/package/scripts/drpc_server.py  |    7 +-
 .../STORM/0.9.1/package/scripts/nimbus.py       |    9 +-
 .../STORM/0.9.1/package/scripts/nimbus_prod.py  |    8 +-
 .../STORM/0.9.1/package/scripts/pacemaker.py    |    7 +-
 .../STORM/0.9.1/package/scripts/rest_api.py     |    3 -
 .../STORM/0.9.1/package/scripts/supervisor.py   |    8 +-
 .../0.9.1/package/scripts/supervisor_prod.py    |    8 +-
 .../STORM/0.9.1/package/scripts/ui_server.py    |    7 +-
 .../STORM/1.0.1.3.0/configuration/storm-env.xml |    6 +
 .../STORM/1.0.1.3.0/kerberos.json               |   15 +-
 .../STORM/1.0.1.3.0/metainfo.xml                |    2 -
 .../1.0.1.3.0/package/scripts/drpc_server.py    |    7 +-
 .../STORM/1.0.1.3.0/package/scripts/nimbus.py   |    8 +-
 .../1.0.1.3.0/package/scripts/nimbus_prod.py    |    8 +-
 .../1.0.1.3.0/package/scripts/pacemaker.py      |    7 +-
 .../STORM/1.0.1.3.0/package/scripts/rest_api.py |    3 -
 .../1.0.1.3.0/package/scripts/supervisor.py     |    8 +-
 .../package/scripts/supervisor_prod.py          |    8 +-
 .../1.0.1.3.0/package/scripts/ui_server.py      |    7 +-
 .../common-services/STORM/1.0.1/kerberos.json   |   15 +-
 .../common-services/STORM/1.0.1/metainfo.xml    |    2 -
 .../common-services/STORM/1.1.0/kerberos.json   |   15 +-
 .../common-services/STORM/1.1.0/metainfo.xml    |    2 -
 .../0.15.0/configuration/superset-env.xml       |  136 +
 .../SUPERSET/0.15.0/configuration/superset.xml  |  178 +
 .../SUPERSET/0.15.0/metainfo.xml                |   88 +
 .../SUPERSET/0.15.0/package/scripts/params.py   |   89 +
 .../0.15.0/package/scripts/service_check.py     |   37 +
 .../0.15.0/package/scripts/status_params.py     |   25 +
 .../SUPERSET/0.15.0/package/scripts/superset.py |  167 +
 .../0.15.0/package/templates/superset.sh        |   95 +
 .../SUPERSET/0.15.0/quicklinks/quicklinks.json  |   24 +
 .../SUPERSET/0.15.0/role_command_order.json     |    7 +
 .../SUPERSET/0.15.0/themes/theme.json           |  120 +
 .../TEZ/0.4.0.2.1/configuration/tez-env.xml     |   10 +
 .../common-services/TEZ/0.4.0.2.1/kerberos.json |    3 +-
 .../TEZ/0.4.0.2.1/package/scripts/tez_client.py |   11 +-
 .../TEZ/0.9.0.3.0/configuration/tez-env.xml     |   10 +
 .../common-services/TEZ/0.9.0.3.0/kerberos.json |    3 +-
 .../TEZ/0.9.0.3.0/package/scripts/tez_client.py |    9 +-
 .../configuration-mapred/mapred-env.xml         |    6 +
 .../YARN/2.1.0.2.0/configuration/yarn-env.xml   |    6 +
 .../YARN/2.1.0.2.0/kerberos.json                |   30 +-
 .../scripts/application_timeline_server.py      |    8 +-
 .../2.1.0.2.0/package/scripts/historyserver.py  |   10 +-
 .../package/scripts/mapreduce2_client.py        |   11 +-
 .../2.1.0.2.0/package/scripts/nodemanager.py    |    8 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |   18 +-
 .../package/scripts/resourcemanager.py          |    7 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |    6 +-
 .../YARN/2.1.0.2.0/package/scripts/yarn.py      |   16 +-
 .../2.1.0.2.0/package/scripts/yarn_client.py    |    8 +-
 .../configuration-mapred/mapred-env.xml         |    6 +
 .../YARN/3.0.0.3.0/configuration/yarn-env.xml   |    9 +-
 .../YARN/3.0.0.3.0/kerberos.json                |   36 +-
 .../scripts/application_timeline_server.py      |    8 +-
 .../3.0.0.3.0/package/scripts/historyserver.py  |    8 +-
 .../package/scripts/mapreduce2_client.py        |    9 +-
 .../3.0.0.3.0/package/scripts/nodemanager.py    |    8 +-
 .../3.0.0.3.0/package/scripts/params_linux.py   |    5 +-
 .../package/scripts/resourcemanager.py          |    7 +-
 .../YARN/3.0.0.3.0/package/scripts/yarn.py      |   14 +-
 .../3.0.0.3.0/package/scripts/yarn_client.py    |    8 +-
 .../YARN/3.0.0.3.0/service_advisor.py           |    5 +
 .../ZEPPELIN/0.6.0.2.5/alerts.json              |   18 -
 .../0.6.0.2.5/configuration/zeppelin-config.xml |  196 -
 .../0.6.0.2.5/configuration/zeppelin-env.xml    |  188 -
 .../configuration/zeppelin-log4j-properties.xml |   37 -
 .../configuration/zeppelin-shiro-ini.xml        |   90 -
 .../ZEPPELIN/0.6.0.2.5/kerberos.json            |   53 -
 .../ZEPPELIN/0.6.0.2.5/metainfo.xml             |  103 -
 .../package/scripts/alert_check_zeppelin.py     |   47 -
 .../scripts/interpreter_json_template.py        |  361 -
 .../package/scripts/livy2_config_template.py    |  107 -
 .../0.6.0.2.5/package/scripts/master.py         |  467 -
 .../0.6.0.2.5/package/scripts/params.py         |  251 -
 .../0.6.0.2.5/package/scripts/service_check.py  |   39 -
 .../package/scripts/spark2_config_template.py   |   84 -
 .../0.6.0.2.5/package/scripts/status_params.py  |   29 -
 .../templates/input.config-zeppelin.json.j2     |   48 -
 .../0.6.0.2.5/quicklinks/quicklinks.json        |   35 -
 .../ZEPPELIN/0.6.0.2.5/role_command_order.json  |    7 -
 .../ZEPPELIN/0.6.0.3.0/alerts.json              |   18 -
 .../0.6.0.3.0/configuration/zeppelin-config.xml |  189 -
 .../0.6.0.3.0/configuration/zeppelin-env.xml    |  185 -
 .../configuration/zeppelin-log4j-properties.xml |   37 -
 .../configuration/zeppelin-shiro-ini.xml        |   90 -
 .../ZEPPELIN/0.6.0.3.0/kerberos.json            |   53 -
 .../ZEPPELIN/0.6.0.3.0/metainfo.xml             |  111 -
 .../package/scripts/alert_check_zeppelin.py     |   47 -
 .../package/scripts/livy2_config_template.py    |  107 -
 .../0.6.0.3.0/package/scripts/master.py         |  448 -
 .../0.6.0.3.0/package/scripts/params.py         |  228 -
 .../0.6.0.3.0/package/scripts/service_check.py  |   39 -
 .../package/scripts/spark2_config_template.py   |   84 -
 .../0.6.0.3.0/package/scripts/status_params.py  |   29 -
 .../templates/input.config-zeppelin.json.j2     |   48 -
 .../0.6.0.3.0/quicklinks/quicklinks.json        |   35 -
 .../ZEPPELIN/0.6.0.3.0/role_command_order.json  |    7 -
 .../ZEPPELIN/0.6.0.3.0/service_advisor.py       |  167 -
 .../common-services/ZEPPELIN/0.6.0/alerts.json  |   18 +
 .../0.6.0/configuration/zeppelin-config.xml     |  208 +
 .../0.6.0/configuration/zeppelin-env.xml        |  194 +
 .../configuration/zeppelin-log4j-properties.xml |   37 +
 .../0.6.0/configuration/zeppelin-shiro-ini.xml  |   97 +
 .../ZEPPELIN/0.6.0/kerberos.json                |   52 +
 .../common-services/ZEPPELIN/0.6.0/metainfo.xml |  103 +
 .../package/scripts/alert_check_zeppelin.py     |   47 +
 .../scripts/interpreter_json_template.py        |  361 +
 .../package/scripts/livy2_config_template.py    |  107 +
 .../ZEPPELIN/0.6.0/package/scripts/master.py    |  522 ++
 .../ZEPPELIN/0.6.0/package/scripts/params.py    |  258 +
 .../0.6.0/package/scripts/service_check.py      |   39 +
 .../package/scripts/spark2_config_template.py   |   84 +
 .../0.6.0/package/scripts/status_params.py      |   29 +
 .../templates/input.config-zeppelin.json.j2     |   48 +
 .../ZEPPELIN/0.6.0/quicklinks/quicklinks.json   |   35 +
 .../ZEPPELIN/0.6.0/role_command_order.json      |    7 +
 .../common-services/ZEPPELIN/0.7.0/alerts.json  |   18 +
 .../0.7.0/configuration/zeppelin-config.xml     |  214 +
 .../0.7.0/configuration/zeppelin-env.xml        |  194 +
 .../configuration/zeppelin-log4j-properties.xml |   37 +
 .../0.7.0/configuration/zeppelin-shiro-ini.xml  |   97 +
 .../ZEPPELIN/0.7.0/kerberos.json                |   52 +
 .../common-services/ZEPPELIN/0.7.0/metainfo.xml |  103 +
 .../package/scripts/alert_check_zeppelin.py     |   47 +
 .../scripts/interpreter_json_template.py        |  366 +
 .../package/scripts/livy2_config_template.py    |  112 +
 .../ZEPPELIN/0.7.0/package/scripts/master.py    |  572 ++
 .../ZEPPELIN/0.7.0/package/scripts/params.py    |  258 +
 .../0.7.0/package/scripts/service_check.py      |   39 +
 .../package/scripts/spark2_config_template.py   |   84 +
 .../0.7.0/package/scripts/status_params.py      |   29 +
 .../templates/input.config-zeppelin.json.j2     |   48 +
 .../ZEPPELIN/0.7.0/quicklinks/quicklinks.json   |   35 +
 .../ZEPPELIN/0.7.0/role_command_order.json      |    7 +
 .../ZEPPELIN/0.7.0/service_advisor.py           |  209 +
 .../3.4.5/configuration/zookeeper-env.xml       |    6 +
 .../ZOOKEEPER/3.4.5/kerberos.json               |    3 +-
 .../3.4.5/package/scripts/params_linux.py       |    6 +-
 .../3.4.5/package/scripts/zookeeper_client.py   |    7 +-
 .../3.4.5/package/scripts/zookeeper_server.py   |    7 +-
 .../src/main/resources/configuration-schema.xsd |    2 +-
 .../custom_actions/scripts/check_host.py        |   27 +-
 .../custom_actions/scripts/install_packages.py  |  143 +-
 .../scripts/remove_previous_stacks.py           |    7 +-
 .../custom_actions/scripts/ru_set_all.py        |   32 -
 .../custom_actions/scripts/update_repo.py       |    7 +-
 .../resources/host_scripts/alert_disk_space.py  |    2 +-
 .../resources/kerberos_descriptor_schema.json   |  194 +
 .../src/main/resources/properties.json          |    2 +
 .../src/main/resources/scripts/configs.py       |  219 +-
 .../src/main/resources/scripts/configs.sh       |  272 +-
 .../scripts/shared_initialization.py            |   15 -
 .../HDFS/package/scripts/hdfs_snamenode.py      |    6 +
 .../0.8/services/HDFS/package/scripts/params.py |    2 +-
 .../0.8/services/HDFS/package/scripts/utils.py  |    3 +
 .../HDP/2.0.6/configuration/cluster-env.xml     |   31 +
 .../2.0.6/hooks/after-INSTALL/scripts/hook.py   |    2 +-
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |   17 +-
 .../scripts/shared_initialization.py            |   58 +-
 .../hooks/before-ANY/files/changeToSecureUid.sh |    2 +
 .../2.0.6/hooks/before-ANY/scripts/params.py    |   77 +-
 .../before-ANY/scripts/shared_initialization.py |   60 +-
 .../scripts/repo_initialization.py              |    3 +-
 .../before-START/scripts/custom_extensions.py   |  173 +
 .../2.0.6/hooks/before-START/scripts/hook.py    |    3 +
 .../2.0.6/hooks/before-START/scripts/params.py  |   43 +-
 .../before-START/scripts/rack_awareness.py      |    1 +
 .../scripts/shared_initialization.py            |   16 +-
 .../HDP/2.0.6/properties/stack_features.json    |   16 +-
 .../HDP/2.0.6/properties/stack_packages.json    | 1178 +++
 .../services/OOZIE/quicklinks/quicklinks.json   |    9 +-
 .../stacks/HDP/2.0.6/services/stack_advisor.py  |  133 +-
 .../FALCON/configuration/oozie-site.xml         |  198 -
 .../stacks/HDP/2.1/services/stack_advisor.py    |    4 +
 .../FALCON/configuration/oozie-site.xml         |  197 -
 .../services/HBASE/configuration/hbase-env.xml  |   10 +-
 .../stacks/HDP/2.2/services/YARN/kerberos.json  |   30 +-
 .../HDP/2.3.ECS/services/ECS/kerberos.json      |    3 +-
 .../services/ECS/package/scripts/params.py      |    2 +-
 .../HDP/2.3.ECS/services/HBASE/kerberos.json    |    9 +-
 .../HDP/2.3.ECS/services/YARN/kerberos.json     |   33 +-
 .../services/ACCUMULO/kerberos.json             |   27 +-
 .../HDP/2.3/services/ACCUMULO/kerberos.json     |    6 +-
 .../services/HBASE/configuration/hbase-env.xml  |    4 +-
 .../services/OOZIE/quicklinks/quicklinks.json   |    9 +-
 .../stacks/HDP/2.3/services/TEZ/kerberos.json   |    3 +-
 .../stacks/HDP/2.3/services/YARN/kerberos.json  |   30 +-
 .../stacks/HDP/2.3/services/stack_advisor.py    |  118 +
 .../stacks/HDP/2.3/upgrades/config-upgrade.xml  |    6 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.3.xml |    6 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.4.xml |    7 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.5.xml |    7 +-
 .../HDP/2.3/upgrades/nonrolling-upgrade-2.6.xml |    7 +-
 .../stacks/HDP/2.3/upgrades/upgrade-2.4.xml     |    1 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.5.xml     |    1 +
 .../stacks/HDP/2.3/upgrades/upgrade-2.6.xml     |    1 +
 .../stacks/HDP/2.4/upgrades/config-upgrade.xml  |    6 +-
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.4.xml |    6 +-
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.5.xml |    6 +-
 .../HDP/2.4/upgrades/nonrolling-upgrade-2.6.xml |    6 +-
 .../stacks/HDP/2.4/upgrades/upgrade-2.5.xml     |    1 +
 .../stacks/HDP/2.4/upgrades/upgrade-2.6.xml     |    1 +
 .../services/ATLAS/configuration/atlas-env.xml  |    6 +
 .../FALCON/configuration/oozie-site.xml         |   47 -
 .../HDP/2.5/services/FALCON/kerberos.json       |   15 +-
 .../stacks/HDP/2.5/services/HBASE/kerberos.json |   15 +-
 .../stacks/HDP/2.5/services/HDFS/kerberos.json  |   21 +-
 .../HIVE/configuration/hive-exec-log4j2.xml     |    6 +-
 .../services/HIVE/configuration/hive-log4j2.xml |   20 +-
 .../HIVE/configuration/llap-cli-log4j2.xml      |    6 +-
 .../HIVE/configuration/llap-daemon-log4j.xml    |    9 +-
 .../HIVE/configuration/tez-interactive-site.xml |   11 +
 .../stacks/HDP/2.5/services/HIVE/kerberos.json  |   30 +-
 .../stacks/HDP/2.5/services/KNOX/kerberos.json  |    3 +-
 .../HDP/2.5/services/RANGER_KMS/kerberos.json   |   15 +-
 .../services/SPARK/configuration/livy-env.xml   |   10 +
 .../stacks/HDP/2.5/services/SPARK/kerberos.json |   21 +-
 .../stacks/HDP/2.5/services/YARN/kerberos.json  |   36 +-
 .../HDP/2.5/services/ZEPPELIN/kerberos.json     |    7 +-
 .../HDP/2.5/services/ZEPPELIN/metainfo.xml      |    4 +-
 .../stacks/HDP/2.5/services/stack_advisor.py    |   81 +-
 .../stacks/HDP/2.5/upgrades/config-upgrade.xml  |   49 +-
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.5.xml |   18 +-
 .../HDP/2.5/upgrades/nonrolling-upgrade-2.6.xml |   38 +-
 .../stacks/HDP/2.5/upgrades/upgrade-2.5.xml     |   11 +-
 .../stacks/HDP/2.5/upgrades/upgrade-2.6.xml     |   23 +-
 .../stacks/HDP/2.6/kerberos_preconfigure.json   |   22 +
 .../services/ATLAS/configuration/atlas-env.xml  |    6 +
 .../ATLAS/configuration/atlas-log4j.xml         |    4 +-
 .../stacks/HDP/2.6/services/ATLAS/kerberos.json |    9 +-
 .../stacks/HDP/2.6/services/ATLAS/metainfo.xml  |    1 +
 .../stacks/HDP/2.6/services/DRUID/kerberos.json |   30 +-
 .../services/HBASE/configuration/hbase-env.xml  |   87 +
 .../services/HBASE/configuration/hbase-site.xml |   19 +
 .../services/HDFS/configuration/core-site.xml   |   29 +
 .../services/HDFS/configuration/hadoop-env.xml  |    5 +
 .../stacks/HDP/2.6/services/HDFS/kerberos.json  |   21 +-
 .../services/HIVE/configuration/hive-env.xml    |    3 +-
 .../configuration/hive-interactive-site.xml     |   14 +
 .../services/HIVE/configuration/hive-site.xml   |    1 +
 .../HIVE/configuration/tez-interactive-site.xml |    6 +
 .../stacks/HDP/2.6/services/OOZIE/kerberos.json |   12 +-
 .../services/SPARK/configuration/livy-env.xml   |   10 +
 .../stacks/HDP/2.6/services/SPARK/kerberos.json |    6 +-
 .../HDP/2.6/services/SPARK2/kerberos.json       |   14 +-
 .../HDP/2.6/services/SUPERSET/kerberos.json     |   53 +
 .../HDP/2.6/services/SUPERSET/metainfo.xml      |   28 +
 .../YARN/configuration-mapred/mapred-site.xml   |    9 +
 .../services/YARN/configuration/yarn-site.xml   |   20 +-
 .../stacks/HDP/2.6/services/YARN/kerberos.json  |   59 +-
 .../ZEPPELIN/configuration/zeppelin-env.xml     |   10 +
 .../HDP/2.6/services/ZEPPELIN/kerberos.json     |    7 +-
 .../HDP/2.6/services/ZEPPELIN/metainfo.xml      |   21 +-
 .../stacks/HDP/2.6/services/stack_advisor.py    |  144 +-
 .../stacks/HDP/2.6/upgrades/config-upgrade.xml  |   31 +
 .../HDP/2.6/upgrades/nonrolling-upgrade-2.6.xml |   27 +-
 .../stacks/HDP/2.6/upgrades/upgrade-2.6.xml     |   15 +
 .../HDP/3.0/configuration/cluster-env.xml       |   37 +
 .../HDP/3.0/hooks/after-INSTALL/scripts/hook.py |    2 +-
 .../3.0/hooks/after-INSTALL/scripts/params.py   |    5 +-
 .../scripts/shared_initialization.py            |   38 +-
 .../HDP/3.0/hooks/before-ANY/scripts/params.py  |   31 +-
 .../3.0/hooks/before-START/scripts/params.py    |   13 +-
 .../scripts/shared_initialization.py            |   16 +-
 .../HDP/3.0/properties/stack_features.json      |    6 -
 .../HDP/3.0/properties/stack_packages.json      | 1056 +++
 .../HDP/3.0/services/KERBEROS/metainfo.xml      |   26 +
 .../HDP/3.0/services/ZEPPELIN/metainfo.xml      |    4 +-
 .../PERF/1.0/services/FAKEHBASE/kerberos.json   |   15 +-
 .../PERF/1.0/services/FAKEHDFS/kerberos.json    |   21 +-
 .../PERF/1.0/services/FAKEHDFS/widgets.json     |   48 +-
 .../PERF/1.0/services/FAKEYARN/kerberos.json    |   36 +-
 .../1.0/services/FAKEZOOKEEPER/kerberos.json    |    3 +-
 .../PERF/1.0/services/GRUMPY/kerberos.json      |   12 +-
 .../PERF/1.0/services/HAPPY/kerberos.json       |   12 +-
 .../PERF/1.0/services/KERBEROS/kerberos.json    |    3 +-
 .../PERF/1.0/services/KERBEROS/metainfo.xml     |    8 +
 .../KERBEROS/package/scripts/kerberos_client.py |    2 +
 .../KERBEROS/package/scripts/kerberos_common.py |   48 +
 .../PERF/1.0/services/SLEEPY/kerberos.json      |   12 +-
 .../stacks/PERF/1.0/services/SNOW/kerberos.json |   12 +-
 .../1.0/upgrades/nonrolling-upgrade-2.0.xml     |    6 +-
 .../src/main/resources/stacks/stack_advisor.py  |    6 +-
 .../src/main/resources/upgrade-pack.xsd         |    1 +
 .../src/main/resources/version_definition.xsd   |    9 +-
 .../TransactionalLockInterceptorTest.java       |    3 +-
 .../ambari/server/StateRecoveryManagerTest.java |   10 +-
 .../ExecutionCommandWrapperTest.java            |  150 +-
 .../ambari/server/actionmanager/StageTest.java  |    2 +-
 .../actionmanager/TestActionDBAccessorImpl.java |   63 +-
 .../actionmanager/TestActionScheduler.java      |  104 +
 .../server/agent/HeartbeatProcessorTest.java    |   87 +-
 .../server/agent/HeartbeatTestHelper.java       |    2 +-
 .../server/agent/TestHeartbeatHandler.java      |   38 +-
 .../server/agent/TestHeartbeatMonitor.java      |   26 +-
 .../alerts/AmbariPerformanceRunnableTest.java   |    3 +-
 .../ComponentVersionAlertRunnableTest.java      |    4 +-
 .../server/api/handlers/ReadHandlerTest.java    |   12 +-
 .../server/api/predicate/QueryParserTest.java   |   18 +
 .../ambari/server/api/query/QueryImplTest.java  |   24 +-
 .../ambari/server/api/query/QueryInfoTest.java  |    2 +-
 .../render/ClusterBlueprintRendererTest.java    |   23 +-
 .../api/query/render/DefaultRendererTest.java   |    8 +-
 .../api/query/render/MinimalRendererTest.java   |   14 +-
 .../resources/BaseResourceDefinitionTest.java   |    2 +-
 .../resources/SimpleResourceDefinitionTest.java |    8 +-
 .../server/api/services/AmbariMetaInfoTest.java |   87 +-
 .../server/api/services/ClusterServiceTest.java |    6 +-
 .../api/services/DeleteResultMetaDataTest.java  |    2 +-
 .../services/KerberosServiceMetaInfoTest.java   |    2 +-
 .../api/services/NamedPropertySetTest.java      |    2 +-
 .../api/services/RepositoryServiceTest.java     |    2 +-
 .../server/api/services/RequestBodyTest.java    |    2 +-
 .../server/api/services/RequestFactoryTest.java |   12 +-
 .../audit/request/DefaultEventCreatorTest.java  |    9 +-
 .../audit/request/RequestAuditLoggerTest.java   |    3 +-
 .../creator/AuditEventCreatorTestBase.java      |    2 +-
 .../ConfigurationChangeEventCreatorTest.java    |    2 +-
 .../RepositoryVersionEventCreatorTest.java      |    4 +
 .../server/bootstrap/BootStrapResourceTest.java |    2 +-
 .../checks/AbstractCheckDescriptorTest.java     |  310 +-
 .../server/checks/AtlasPresenceCheckTest.java   |    4 +-
 .../checks/ClientRetryPropertyCheckTest.java    |   42 +-
 .../checks/ComponentExistsInRepoCheckTest.java  |  352 +
 .../checks/ComponentsInstallationCheckTest.java |   62 +-
 .../checks/ConfigurationMergeCheckTest.java     |   15 +-
 .../DatabaseConsistencyCheckHelperTest.java     |  153 +-
 .../checks/DruidHighAvailabilityCheckTest.java  |  176 +
 .../ambari/server/checks/HealthCheckTest.java   |    2 +-
 .../HiveDynamicServiceDiscoveryCheckTest.java   |   25 +-
 .../checks/HiveMultipleMetastoreCheckTest.java  |   55 +-
 .../checks/HiveNotRollingWarningTest.java       |   39 +-
 .../server/checks/HostsHeartbeatCheckTest.java  |   16 +-
 .../checks/HostsMasterMaintenanceCheckTest.java |   73 +-
 .../checks/HostsRepositoryVersionCheckTest.java |   90 +-
 .../server/checks/InstallPackagesCheckTest.java |   18 +-
 .../server/checks/KafkaKerberosCheckTest.java   |   39 +-
 ...duce2JobHistoryStatePreservingCheckTest.java |   42 +-
 .../checks/PreviousUpgradeCompletedTest.java    |   11 +-
 .../server/checks/RangerAuditDbCheckTest.java   |   42 +-
 .../server/checks/RangerPasswordCheckTest.java  |   47 +-
 .../server/checks/RangerSSLConfigCheckTest.java |   38 +-
 .../SecondaryNamenodeDeletedCheckTest.java      |   56 +-
 .../checks/ServiceCheckValidityCheckTest.java   |   94 +-
 .../server/checks/ServicePresenceCheckTest.java |   34 +-
 .../ServicesMaintenanceModeCheckTest.java       |   46 +-
 ...vicesMapReduceDistributedCacheCheckTest.java |   50 +-
 ...rvicesNamenodeHighAvailabilityCheckTest.java |   39 +-
 .../ServicesNamenodeTruncateCheckTest.java      |   59 +-
 .../ServicesTezDistributedCacheCheckTest.java   |   39 +-
 .../server/checks/ServicesUpCheckTest.java      |   80 +-
 .../ServicesYarnWorkPreservingCheckTest.java    |   38 +-
 .../server/checks/StormShutdownWarningTest.java |   38 +-
 ...nTimelineServerStatePreservingCheckTest.java |    9 +-
 .../server/collections/PredicateUtilsTest.java  |    6 +-
 .../collections/functors/AndPredicateTest.java  |    8 +-
 .../collections/functors/NotPredicateTest.java  |    4 +-
 .../collections/functors/OrPredicateTest.java   |    8 +-
 .../AmbariCustomCommandExecutionHelperTest.java |   34 +-
 .../AmbariManagementControllerImplTest.java     |   63 +-
 .../AmbariManagementControllerTest.java         |  479 +-
 .../controller/AuthToLocalBuilderTest.java      |   87 +-
 .../BackgroundCustomCommandExecutionTest.java   |    4 +-
 .../server/controller/KerberosHelperTest.java   |  981 +-
 ...hYarnCapacitySchedulerReleaseConfigTest.java |    6 +-
 .../RootServiceResponseFactoryTest.java         |    2 +-
 .../ActiveWidgetLayoutResourceProviderTest.java |    3 +-
 .../AlertDefinitionResourceProviderTest.java    |    8 +-
 .../AlertGroupResourceProviderTest.java         |    2 +-
 .../AlertHistoryResourceProviderTest.java       |    2 +-
 .../AlertNoticeResourceProviderTest.java        |    2 +-
 .../AmbariPrivilegeResourceProviderTest.java    |    8 +-
 .../internal/ArtifactResourceProviderTest.java  |   12 +-
 .../internal/BaseBlueprintProcessorTest.java    |   13 +-
 .../BlueprintConfigurationProcessorTest.java    |  485 +-
 .../internal/BlueprintResourceProviderTest.java |   15 +-
 .../internal/CalculatedStatusTest.java          |    8 +-
 .../ClientConfigResourceProviderTest.java       |    9 +-
 .../internal/ClusterControllerImplTest.java     |   12 +-
 ...rKerberosDescriptorResourceProviderTest.java |   16 +-
 .../internal/ClusterResourceProviderTest.java   |   16 +-
 ...ClusterStackVersionResourceProviderTest.java |  635 +-
 .../internal/ComponentResourceProviderTest.java |   10 +-
 .../ConfigGroupResourceProviderTest.java        |    6 +-
 .../ConfigurationResourceProviderTest.java      |    2 +-
 .../internal/ExportBlueprintRequestTest.java    |    6 +-
 .../internal/ExtensionResourceProviderTest.java |    3 +-
 .../internal/FeedResourceProviderTest.java      |    6 +-
 .../GroupPrivilegeResourceProviderTest.java     |    2 +
 ...ostComponentProcessResourceProviderTest.java |    6 +-
 .../HostComponentResourceProviderTest.java      |   23 +-
 .../internal/HostResourceProviderTest.java      |  138 +-
 .../HostStackVersionResourceProviderTest.java   |   24 +-
 .../internal/HttpPropertyProviderTest.java      |   29 +-
 .../internal/InstanceResourceProviderTest.java  |    6 +-
 .../internal/JMXHostProviderTest.java           |    7 +-
 .../KerberosDescriptorResourceProviderTest.java |    7 +-
 .../internal/MemberResourceProviderTest.java    |   11 +-
 .../PreUpgradeCheckResourceProviderTest.java    |    8 +-
 .../internal/ProvisionClusterRequestTest.java   |    6 +-
 .../QuickLinkArtifactResourceProviderTest.java  |    6 +-
 .../RepositoryResourceProviderTest.java         |   25 +-
 .../controller/internal/RequestImplTest.java    |    7 +-
 .../internal/RequestResourceProviderTest.java   |   37 +-
 .../internal/RequestStageContainerTest.java     |    3 +-
 ...ootServiceComponentPropertyProviderTest.java |    3 +-
 ...ootServiceComponentResourceProviderTest.java |    5 +-
 ...erviceHostComponentResourceProviderTest.java |    2 +-
 .../RootServiceResourceProviderTest.java        |    5 +-
 .../internal/ScaleClusterRequestTest.java       |    6 +-
 .../internal/ServiceResourceProviderTest.java   |  278 +-
 .../SimplifyingPredicateVisitorTest.java        |    6 +-
 .../StackAdvisorResourceProviderTest.java       |   25 +
 .../StackDefinedPropertyProviderTest.java       |   18 +-
 .../internal/StackResourceProviderTest.java     |    3 +-
 .../StackServiceResourceProviderTest.java       |    5 +-
 .../server/controller/internal/StackTest.java   |   62 +-
 .../StackUpgradeConfigurationMergeTest.java     |  139 +-
 .../internal/StageResourceProviderTest.java     |    4 +-
 .../TargetClusterResourceProviderTest.java      |    6 +-
 .../internal/UpgradeResourceProviderTest.java   |  279 +-
 .../UpgradeSummaryResourceProviderTest.java     |    6 +-
 .../UserAuthorizationResourceProviderTest.java  |    2 +-
 .../UserPrivilegeResourceProviderTest.java      |   15 +-
 .../VersionDefinitionResourceProviderTest.java  |  184 +-
 .../ViewInstanceResourceProviderTest.java       |    3 +-
 .../internal/ViewURLResourceProviderTest.java   |    2 +-
 .../WidgetLayoutResourceProviderTest.java       |    6 +-
 .../LogSearchDataRetrievalServiceTest.java      |    2 +-
 .../LoggingRequestHelperFactoryImplTest.java    |   12 +-
 .../logging/LoggingRequestHelperImplTest.java   |    6 +-
 .../metrics/JMXPropertyProviderTest.java        |   12 +-
 .../RestMetricsPropertyProviderTest.java        |   12 +-
 .../ganglia/GangliaPropertyProviderTest.java    |    2 +-
 .../timeline/MetricsRequestHelperTest.java      |    6 +-
 .../utilities/KerberosIdentityCleanerTest.java  |    2 +-
 .../listeners/tasks/TaskStatusListenerTest.java |    3 +-
 .../upgrade/StackUpgradeFinishListenerTest.java |    6 +
 .../upgrade/StackVersionListenerTest.java       |  104 +-
 .../server/hooks/users/UserHookServiceTest.java |    4 +-
 .../ambari/server/orm/DBAccessorImplTest.java   |   88 +
 .../apache/ambari/server/orm/OrmTestHelper.java |    2 +-
 .../server/orm/dao/AlertDefinitionDAOTest.java  |    6 +
 .../server/orm/dao/HostRoleCommandDAOTest.java  |   10 +-
 .../orm/dao/RepositoryVersionDAOTest.java       |   12 +
 .../ambari/server/orm/dao/RequestDAOTest.java   |    6 +-
 .../ambari/server/orm/dao/StageDAOTest.java     |    2 +-
 .../ambari/server/orm/dao/UpgradeDAOTest.java   |  110 +-
 .../ambari/server/orm/dao/WidgetDAOTest.java    |    2 +-
 .../apache/ambari/server/orm/db/DDLTests.java   |   24 +-
 .../server/orm/entities/HostEntityTest.java     |   12 +-
 .../orm/entities/LdapSyncEventEntityTest.java   |    2 +-
 .../orm/entities/LdapSyncSpecEntityTest.java    |   10 +-
 .../scheduler/ExecutionScheduleManagerTest.java |   42 +-
 .../authorization/AmbariAuthenticationTest.java |   11 +-
 .../AmbariAuthorizationFilterTest.java          |    8 +-
 .../AmbariPamAuthenticationProviderTest.java    |  133 +-
 .../security/authorization/UsersTest.java       |    4 +-
 .../ldap/AmbariLdapDataPopulatorTest.java       |   83 +-
 .../serveraction/ServerActionExecutorTest.java  |    2 +-
 .../ADKerberosOperationHandlerTest.java         |    2 +-
 ...AbstractPrepareKerberosServerActionTest.java |    2 +-
 .../FinalizeKerberosServerActionTest.java       |    8 +-
 .../kerberos/KerberosIdentityDataFileTest.java  |    8 +-
 .../kerberos/KerberosOperationHandlerTest.java  |    2 +-
 .../kerberos/KerberosServerActionTest.java      |    2 +-
 .../MITKerberosOperationHandlerTest.java        |   20 +-
 .../UpdateKerberosConfigsServerActionTest.java  |    2 +-
 .../ComponentVersionCheckActionTest.java        |   14 -
 .../upgrades/ConfigureActionTest.java           |    6 +-
 .../upgrades/HiveZKQuorumConfigActionTest.java  |    2 +-
 .../PreconfigureKerberosActionTest.java         |  596 ++
 .../RangerKerberosConfigCalculationTest.java    |    2 +-
 .../RangerUsersyncConfigCalculationTest.java    |  126 +
 .../upgrades/UpgradeActionTest.java             |    6 +-
 .../UpgradeUserKerberosDescriptorTest.java      |    6 +-
 .../PostUserCreationHookServerActionTest.java   |    4 +-
 .../server/stack/ComponentModuleTest.java       |    2 +-
 .../server/stack/KerberosDescriptorTest.java    |  164 +-
 .../QuickLinksConfigurationModuleTest.java      |   14 +
 .../ambari/server/stack/ServiceModuleTest.java  |   27 +-
 .../ambari/server/stack/StackManagerTest.java   |    5 +-
 .../ambari/server/stack/StackModuleTest.java    |   10 +-
 .../server/stack/StackServiceDirectoryTest.java |   76 +
 .../ambari/server/state/CheckHelperTest.java    |  246 +-
 .../ambari/server/state/ConfigHelperTest.java   |    8 +-
 .../server/state/ServiceComponentTest.java      |   21 +-
 .../apache/ambari/server/state/ServiceTest.java |   41 -
 .../ambari/server/state/UpgradeContextTest.java |  413 +
 .../ambari/server/state/UpgradeHelperTest.java  |  219 +-
 .../state/alerts/AlertDefinitionHashTest.java   |    7 +-
 .../state/alerts/AlertReceivedListenerTest.java |    3 +-
 .../alerts/AlertStateChangedEventTest.java      |    4 +-
 .../state/cluster/ClusterDeadlockTest.java      |    6 +-
 .../server/state/cluster/ClusterTest.java       |   57 +-
 .../server/state/cluster/ClustersTest.java      |   15 +-
 ...omponentHostConcurrentWriteDeadlockTest.java |    6 +-
 .../ambari/server/state/host/HostTest.java      |    4 +-
 .../KerberosComponentDescriptorTest.java        |   18 +-
 .../KerberosConfigurationDescriptorTest.java    |    2 +-
 .../state/kerberos/KerberosDescriptorTest.java  |  108 +-
 .../KerberosDescriptorUpdateHelperTest.java     |    9 +-
 .../KerberosIdentityDescriptorTest.java         |   49 +-
 .../kerberos/KerberosKeytabDescriptorTest.java  |   28 +-
 .../KerberosPrincipalDescriptorTest.java        |   24 +-
 .../kerberos/KerberosServiceDescriptorTest.java |   26 +-
 .../kerberos/VariableReplacementHelperTest.java |   35 +-
 .../quicklinksprofile/FilterEvaluatorTest.java  |    8 +-
 .../QuickLinkVisibilityControllerTest.java      |   40 +-
 .../state/repository/VersionDefinitionTest.java |  148 +
 .../AlertNoticeDispatchServiceTest.java         |    2 +-
 .../services/RetryUpgradeActionServiceTest.java |    2 +-
 .../server/state/stack/UpgradePackTest.java     |    9 +-
 .../svccomphost/ServiceComponentHostTest.java   |   82 +-
 .../server/testing/DBInconsistencyTests.java    |    2 +
 .../server/topology/AmbariContextTest.java      |  139 +-
 .../topology/AsyncCallableServiceTest.java      |   89 +-
 .../server/topology/BlueprintFactoryTest.java   |    2 +-
 .../topology/BlueprintValidatorImplTest.java    |   20 +-
 .../ClusterConfigurationRequestTest.java        |   81 +-
 .../ClusterDeployWithStartOnlyTest.java         |   51 +-
 ...InstallWithoutStartOnComponentLevelTest.java |   55 +-
 .../ClusterInstallWithoutStartTest.java         |   53 +-
 .../topology/ClusterTopologyImplTest.java       |    8 +-
 .../topology/ConfigureClusterTaskTest.java      |   84 +-
 .../server/topology/LogicalRequestTest.java     |   31 +-
 .../topology/RequiredPasswordValidatorTest.java |   32 +-
 .../server/topology/TopologyManagerTest.java    |   76 +-
 .../validators/HiveServiceValidatorTest.java    |    6 +-
 .../RequiredConfigPropertiesValidatorTest.java  |   28 +-
 .../StackConfigTypeValidatorTest.java           |    2 +-
 .../server/upgrade/UpgradeCatalog251Test.java   |    2 +-
 .../server/upgrade/UpgradeCatalog252Test.java   |  260 +
 .../server/upgrade/UpgradeCatalog260Test.java   |  871 ++
 .../server/upgrade/UpgradeCatalog300Test.java   |  208 +-
 .../utils/ManagedThreadPoolExecutorTest.java    |   51 +
 .../ambari/server/utils/StageUtilsTest.java     |    6 +
 .../utils/SynchronousThreadPoolExecutor.java    |    2 +-
 .../ambari/server/utils/TestParallel.java       |    2 +-
 .../server/view/RemoteAmbariClusterTest.java    |    7 +-
 .../view/ViewDataMigrationUtilityTest.java      |    4 +-
 .../ambari/server/view/ViewRegistryTest.java    |   20 +-
 .../view/ViewSubResourceProviderTest.java       |    4 +-
 .../server/view/events/EventImplTest.java       |    8 +-
 .../test/python/TestComponentVersionMapping.py  |   84 +
 ambari-server/src/test/python/TestConfigs.py    |   38 +-
 ambari-server/src/test/python/TestMpacks.py     |    2 +-
 .../src/test/python/TestStackFeature.py         |  149 +-
 .../src/test/python/TestStackSelect.py          |  258 +
 .../src/test/python/TestUpgradeSummary.py       |  137 +
 .../src/test/python/TestVersionSelectUtil.py    |   18 +-
 .../HIVE/test_jdbc_driver_config.py             |   66 +
 .../LOGSEARCH/test_service_advisor.py           |   30 +-
 .../RANGER/test_db_flavor_config.py             |   63 +
 .../RANGER_KMS/test_db_flavor_config.py         |   63 +
 .../SPARK/2.2.0/test_service_advisor.py         |  289 +
 .../SQOOP/test_jdbc_driver_config.py            |   63 +
 .../common-services/configs/hive_default.json   |  650 ++
 .../configs/hive_unsupported_jdbc_type.json     |  650 ++
 .../configs/ranger_admin_default.json           |  386 +
 .../ranger_admin_unsupported_db_flavor.json     |  386 +
 .../configs/ranger_kms_default.json             |  802 ++
 .../ranger_kms_unsupported_db_flavor.json       |  802 ++
 .../common-services/configs/sqoop_default.json  |  879 ++
 .../configs/sqoop_unsupported_jdbc_driver.json  |  879 ++
 .../test/python/custom_actions/TestCheckHost.py |   33 +
 .../custom_actions/TestInstallPackages.py       | 1542 +--
 .../custom_actions/TestRemoveStackVersion.py    |   40 +-
 .../configs/install_packages_config.json        |    2 +-
 .../python/custom_actions/test_ru_set_all.py    |   35 +-
 .../python/stacks/2.0.6/FLUME/test_flume.py     |    5 +-
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |   21 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |  114 +-
 .../2.0.6/HBASE/test_hbase_regionserver.py      |   14 +-
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |   15 +-
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   57 +-
 .../stacks/2.0.6/HDFS/test_hdfs_client.py       |   15 +-
 .../stacks/2.0.6/HDFS/test_journalnode.py       |   13 +-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |   18 +-
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |    7 +-
 .../stacks/2.0.6/HIVE/test_hcat_client.py       |    3 +
 .../stacks/2.0.6/HIVE/test_hive_client.py       |   23 +-
 .../stacks/2.0.6/HIVE/test_hive_metastore.py    |   42 +-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |  186 +-
 .../2.0.6/HIVE/test_hive_service_check.py       |   16 +-
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |   60 +-
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |   15 +-
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  141 +-
 .../2.0.6/OOZIE/test_oozie_service_check.py     |    5 +-
 .../python/stacks/2.0.6/PIG/test_pig_client.py  |   20 +-
 .../python/stacks/2.0.6/SQOOP/test_sqoop.py     |   14 +-
 .../stacks/2.0.6/YARN/test_historyserver.py     |   15 +-
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   16 +-
 .../stacks/2.0.6/YARN/test_nodemanager.py       |   16 +-
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |  173 +-
 .../stacks/2.0.6/YARN/test_yarn_client.py       |   14 +-
 .../2.0.6/ZOOKEEPER/test_zookeeper_client.py    |   14 +-
 .../2.0.6/ZOOKEEPER/test_zookeeper_server.py    |   17 +-
 .../stacks/2.0.6/common/test_stack_advisor.py   |    4 +-
 .../python/stacks/2.0.6/configs/default.json    |    3 +-
 .../default_yarn_include_file_dont_manage.json  | 1260 +++
 .../default_yarn_include_file_manage.json       | 1260 +++
 .../test/python/stacks/2.0.6/configs/nn_eu.json |   21 +-
 .../stacks/2.0.6/configs/nn_eu_standby.json     |   21 +-
 .../python/stacks/2.0.6/configs/secured.json    | 1210 +--
 .../secured_yarn_include_file_dont_manage.json  | 1078 +++
 .../secured_yarn_include_file_manage.json       | 1078 +++
 .../hooks/after-INSTALL/test_after_install.py   |   70 +-
 .../2.0.6/hooks/before-ANY/test_before_any.py   |  121 +-
 .../hooks/before-INSTALL/test_before_install.py |   10 +
 .../hooks/before-START/test_before_start.py     |    6 +-
 .../stacks/2.1/FALCON/test_falcon_client.py     |   19 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |   56 +-
 .../stacks/2.1/FALCON/test_service_check.py     |    5 +-
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  158 +-
 .../stacks/2.1/STORM/test_storm_drpc_server.py  |   14 +-
 .../stacks/2.1/STORM/test_storm_nimbus.py       |   14 +-
 .../stacks/2.1/STORM/test_storm_nimbus_prod.py  |   14 +-
 .../stacks/2.1/STORM/test_storm_supervisor.py   |   14 +-
 .../2.1/STORM/test_storm_supervisor_prod.py     |   15 +-
 .../stacks/2.1/STORM/test_storm_ui_server.py    |   15 +-
 .../python/stacks/2.1/TEZ/test_tez_client.py    |   19 +-
 .../stacks/2.1/YARN/test_apptimelineserver.py   |   13 +-
 .../stacks/2.1/common/test_stack_advisor.py     |   28 +
 .../2.1/configs/hive-metastore-upgrade.json     |   21 +-
 .../stacks/2.2/ACCUMULO/test_accumulo_client.py |   15 +-
 .../stacks/2.2/KAFKA/test_kafka_broker.py       |   15 +-
 .../stacks/2.2/KERBEROS/test_kerberos_client.py |   23 +
 .../python/stacks/2.2/KNOX/test_knox_gateway.py |   62 +-
 .../stacks/2.2/PIG/test_pig_service_check.py    |   13 +
 .../stacks/2.2/RANGER/test_ranger_admin.py      |   10 -
 .../stacks/2.2/RANGER/test_ranger_usersync.py   |    9 -
 .../stacks/2.2/SLIDER/test_slider_client.py     |   21 +-
 .../stacks/2.2/SPARK/test_job_history_server.py |   31 +-
 .../stacks/2.2/SPARK/test_spark_client.py       |   13 +-
 .../2.2/SPARK/test_spark_service_check.py       |    8 +-
 .../stacks/2.2/common/test_conf_select.py       |   30 +-
 .../stacks/2.2/common/test_stack_advisor.py     |  232 +-
 .../python/stacks/2.2/configs/knox_upgrade.json |   21 +-
 .../stacks/2.3/ATLAS/test_metadata_server.py    |    6 +-
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |   20 +-
 .../2.3/MAHOUT/test_mahout_service_check.py     |   28 +-
 .../2.3/SPARK/test_spark_thrift_server.py       |   21 +-
 .../stacks/2.3/common/test_stack_advisor.py     |   22 +-
 .../stacks/2.3/configs/storm_default.json       |    6 +-
 .../2.3/configs/storm_default_secure.json       |    6 +-
 .../stacks/2.4/LOGSEARCH/test_logsearch.py      |    2 +
 .../test/python/stacks/2.4/configs/default.json |    5 +-
 .../stacks/2.5/ATLAS/test_atlas_server.py       |    2 +-
 .../stacks/2.5/RANGER_KMS/test_kms_server.py    |   24 +-
 .../python/stacks/2.5/SPARK/test_spark_livy.py  |   16 +-
 .../2.5/ZEPPELIN/interpreter_json_generated.py  |   27 +
 .../stacks/2.5/ZEPPELIN/test_zeppelin_060.py    |  339 +
 .../stacks/2.5/ZEPPELIN/test_zeppelin_master.py |  330 -
 .../stacks/2.5/common/test_stack_advisor.py     |  304 +-
 .../test/python/stacks/2.5/configs/default.json |   53 +-
 .../2.5/configs/ranger-admin-default.json       |    1 -
 .../2.5/configs/ranger-admin-secured.json       |    1 -
 .../stacks/2.5/configs/ranger-kms-default.json  |    1 -
 .../stacks/2.5/configs/ranger-kms-secured.json  |    7 +-
 .../test/python/stacks/2.5/configs/secured.json |   71 +-
 .../test/python/stacks/2.6/DRUID/test_druid.py  |   52 +-
 .../stacks/2.6/SPARK2/test_spark_livy2.py       |   16 +-
 .../2.6/ZEPPELIN/interpreter_json_generated.py  |   27 +
 .../stacks/2.6/ZEPPELIN/test_zeppelin_070.py    |  442 +
 .../stacks/2.6/common/test_stack_advisor.py     |  531 +-
 .../test/python/stacks/2.6/configs/default.json |   82 +-
 .../2.6/configs/ranger-admin-default.json       |    1 -
 .../2.6/configs/ranger-admin-secured.json       |    1 -
 .../src/test/python/stacks/utils/RMFTestCase.py |  118 +-
 .../PreconfigureActionTest_cluster_config.json  |  110 +
 ...ureActionTest_kerberos_descriptor_stack.json |  713 ++
 ..._kerberos_descriptor_stack_preconfigure.json |  730 ++
 .../src/test/resources/TestConfigs-content.xml  |   28 +
 .../src/test/resources/hbase_version_test.xml   |    2 +
 .../test_kerberos_descriptor_ranger_kms.json    |  109 +
 .../src/test/resources/parent_quicklinks.json   |    1 +
 .../stacks/HDP/2.0.8/kerberos_preconfigure.json |   23 +
 .../HDP/2.0.8/services/HDFS/kerberos.json       |   12 +-
 .../upgrades/upgrade_nonrolling_new_stack.xml   |    6 +-
 .../HDP/2.1.1/upgrades/upgrade_test_HDP-250.xml |  267 +
 .../HDP/2.1.1/upgrades/upgrade_test_checks.xml  |   15 +
 .../upgrade_test_force_config_change.xml        |  267 +
 .../2.1.1/upgrades/upgrade_test_nonrolling.xml  |    4 +-
 .../resources/stacks/HDP/2.2.0/repos/hdp.json   |    4 +
 .../stacks/HDP/2.2.0/repos/repoinfo.xml         |    8 +
 .../stacks/HDP/2.2.0/repos/version-2.2.0.5.xml  |   51 +
 .../HDP/2.2.0/upgrades/upgrade_test_checks.xml  |    2 +-
 .../resources/stacks/HDP/2.2.1/metainfo.xml     |   24 +
 .../resources/stacks/HDP/2.2.1/repos/hdp.json   |    7 +
 .../stacks/HDP/2.2.1/repos/repoinfo.xml         |   36 +
 .../HDP/2.2.1/services/RANGER/alerts.json       |   74 +
 .../resources/stacks/OTHER/1.0/widgets.json     |   95 +
 .../resources/version_definition_test_maint.xml |   62 +
 .../version_definition_test_maint_partial.xml   |   54 +
 .../version_definition_test_patch_config.xml    |   55 +
 ambari-web/app/assets/test/tests.js             |    1 +
 ambari-web/app/config.js                        |    1 -
 .../global/background_operations_controller.js  |    9 +-
 .../controllers/global/cluster_controller.js    |    5 +
 .../app/controllers/global/update_controller.js |    2 +-
 ambari-web/app/controllers/installer.js         |   62 +-
 .../journalNode/progress_controller.js          |    4 +-
 .../journalNode/step4_controller.js             |    6 +-
 .../journalNode/step8_controller.js             |    2 +-
 .../nameNode/step5_controller.js                |    6 +-
 .../highAvailability/progress_controller.js     |    4 +-
 .../progress_popup_controller.js                |    8 +-
 .../rangerAdmin/step4_controller.js             |    2 +-
 .../main/admin/kerberos/step2_controller.js     |    7 +-
 .../main/admin/stack_and_upgrade_controller.js  |  234 +-
 .../main/host/bulk_operations_controller.js     |    2 +-
 ambari-web/app/controllers/main/host/details.js |  268 +-
 ambari-web/app/controllers/main/service/item.js |   20 +-
 .../service/manage_config_groups_controller.js  |   63 +-
 .../main/service/reassign/step1_controller.js   |   13 +-
 .../main/service/reassign/step4_controller.js   |   10 +-
 .../main/service/reassign_controller.js         |    7 +-
 ambari-web/app/controllers/wizard.js            |    6 +-
 .../app/controllers/wizard/step1_controller.js  |    2 +
 .../app/controllers/wizard/step2_controller.js  |    4 +-
 .../app/controllers/wizard/step6_controller.js  |   15 +-
 .../wizard/step7/assign_master_controller.js    |    5 +-
 .../app/controllers/wizard/step7_controller.js  |   10 +-
 .../app/controllers/wizard/step8_controller.js  |   38 +-
 .../configs/services/ambari_infra_properties.js |   31 +-
 .../configs/services/logsearch_properties.js    |   97 +-
 ambari-web/app/mappers/hosts_mapper.js          |   14 +-
 .../app/mappers/repository_version_mapper.js    |   21 +-
 ambari-web/app/mappers/service_mapper.js        |    6 +-
 .../app/mappers/service_metrics_mapper.js       |    3 +-
 ambari-web/app/mappers/stack_mapper.js          |    5 +-
 ambari-web/app/mappers/stack_service_mapper.js  |    1 +
 .../app/mappers/stack_upgrade_history_mapper.js |    1 +
 ambari-web/app/mappers/stack_version_mapper.js  |    9 +-
 ambari-web/app/messages.js                      |   52 +-
 .../configs/config_recommendation_parser.js     |   29 +-
 ...onfig_with_override_recommendation_parser.js |    7 +-
 .../app/mixins/common/configs/configs_saver.js  |   47 +-
 .../mixins/common/configs/enhanced_configs.js   |   36 +-
 .../mixins/common/table_server_view_mixin.js    |    2 +-
 .../app/mixins/common/widgets/widget_mixin.js   |    2 +-
 .../main/host/details/actions/check_host.js     |    4 +-
 .../configs/component_actions_by_configs.js     |   10 +-
 .../main/service/configs/config_overridable.js  |    3 +-
 .../mixins/wizard/assign_master_components.js   |   23 +-
 .../app/mixins/wizard/wizardHostsLoading.js     |    2 +-
 .../wizard/wizardProgressPageController.js      |   20 +-
 .../models/configs/objects/service_config.js    |    4 +-
 .../configs/objects/service_config_property.js  |    8 +
 ambari-web/app/models/host.js                   |    2 +
 ambari-web/app/models/operating_system.js       |    3 +-
 ambari-web/app/models/repository.js             |    4 +-
 ambari-web/app/models/service.js                |    1 +
 ambari-web/app/models/stack.js                  |    4 +-
 .../app/models/stack_service_component.js       |    3 +-
 .../models/stack_version/repository_version.js  |   16 +-
 .../app/models/stack_version/service_simple.js  |    4 +-
 .../stack_version/stack_upgrade_history.js      |    1 +
 ambari-web/app/models/stack_version/version.js  |    2 +
 ambari-web/app/routes/installer.js              |    4 +-
 ambari-web/app/routes/stack_upgrade_routes.js   |   15 +-
 ambari-web/app/styles/application.less          |   12 +
 ambari-web/app/styles/modal_popups.less         |   11 +
 ambari-web/app/styles/stack_versions.less       |  202 +-
 ambari-web/app/templates/common/breadcrumbs.hbs |    6 +-
 .../common/configs/overriddenProperty.hbs       |    4 +-
 .../templates/common/host_progress_popup.hbs    |   27 +-
 .../modal_popups/install_repo_confirmation.hbs  |   32 +
 .../revert_patch_upgrade_confirmation.hbs       |   48 +
 .../stack_upgrade/stack_upgrade_wizard.hbs      |  180 +-
 .../main/admin/stack_upgrade/upgrade_group.hbs  |   18 +-
 .../admin/stack_upgrade/upgrade_history.hbs     |   49 +-
 .../admin/stack_upgrade/upgrade_options.hbs     |   47 +-
 .../admin/stack_upgrade/upgrade_version_box.hbs |   11 +-
 .../stack_upgrade/upgrade_version_column.hbs    |   39 +-
 .../main/admin/stack_upgrade/versions.hbs       |    2 +-
 ambari-web/app/templates/main/host/details.hbs  |    3 +
 .../main/host/details/recoverHostErrorPopup.hbs |   23 +
 .../main/host/details/recoverHostPopup.hbs      |   22 +
 ambari-web/app/templates/wizard/step1.hbs       |  326 +-
 ambari-web/app/templates/wizard/step2.hbs       |    5 +
 ambari-web/app/utils/ajax/ajax.js               |   38 +-
 .../app/utils/batch_scheduled_requests.js       |    2 +-
 ambari-web/app/utils/db.js                      |    8 -
 ambari-web/app/utils/helper.js                  |   11 +-
 ambari-web/app/utils/host_progress_popup.js     |  121 +-
 ambari-web/app/utils/hosts.js                   |    5 +-
 ambari-web/app/views/common/breadcrumbs_view.js |   24 +-
 .../views/common/configs/config_history_flow.js |    7 +-
 .../app/views/common/configs/controls_view.js   |    2 +-
 .../configs/overriddenPropertyRow_view.js       |    3 +
 .../views/common/configs/service_config_view.js |    3 +-
 .../configs/service_configs_by_category_view.js |   16 +-
 ambari-web/app/views/common/controls_view.js    |    4 +-
 .../views/common/helpers/status_icon_view.js    |    1 +
 .../common/host_progress_popup_body_view.js     |  316 +-
 .../app/views/common/quick_view_link_view.js    |    6 +
 ambari-web/app/views/common/sort_view.js        |   26 +-
 .../admin/stack_upgrade/upgrade_history_view.js |   57 +-
 .../stack_upgrade/upgrade_version_box_view.js   |  215 +-
 .../upgrade_version_column_view.js              |   73 +-
 .../admin/stack_upgrade/upgrade_wizard_view.js  |   37 +-
 .../main/admin/stack_upgrade/versions_view.js   |   18 +-
 .../app/views/main/alert_definitions_view.js    |   10 +-
 .../views/main/dashboard/config_history_view.js |   25 +-
 ambari-web/app/views/main/host.js               |   10 +
 .../views/main/service/reassign/step1_view.js   |   25 +-
 .../app/views/main/service/reassign_view.js     |    2 +-
 ambari-web/app/views/wizard/step1_view.js       |    6 +-
 ambari-web/app/views/wizard/step2_view.js       |   21 +
 .../global/background_operations_test.js        |    4 +-
 .../global/cluster_controller_test.js           |   21 +-
 ambari-web/test/controllers/installer_test.js   |   37 +-
 .../journalNode/progress_controller_test.js     |    2 -
 .../progress_controller_test.js                 |    3 -
 .../progress_popup_controller_test.js           |    2 +
 .../admin/kerberos/step2_controller_test.js     |    9 +-
 .../admin/stack_and_upgrade_controller_test.js  |  225 +-
 .../test/controllers/main/host/details_test.js  |   59 +-
 .../manage_config_groups_controller_test.js     |   89 +-
 .../service/reassign/step1_controller_test.js   |    2 +
 .../service/reassign/step4_controller_test.js   |   12 +-
 .../test/controllers/wizard/step1_test.js       |    2 +
 .../step7/assign_master_controller_test.js      |   23 +
 .../test/controllers/wizard/step7_test.js       |   11 +
 .../test/controllers/wizard/step8_test.js       |    4 +-
 ambari-web/test/controllers/wizard_test.js      |    4 +-
 .../mixins/common/configs/configs_saver_test.js |   98 +-
 .../common/configs/enhanced_configs_test.js     |   61 +-
 .../component_actions_by_configs_test.js        |   10 +-
 .../service/configs/config_overridable_test.js  |    2 +
 .../configs/objects/service_config_test.js      |   14 +-
 ambari-web/test/utils/helper_test.js            |    5 +
 .../test/utils/host_progress_popup_test.js      |    7 +-
 ambari-web/test/utils/hosts_test.js             |    3 +-
 .../test/views/common/breadcrumbs_view_test.js  |   30 +-
 .../host_progress_popup_body_view_test.js       |  641 +-
 .../test/views/common/quick_link_view_test.js   |   50 +
 ambari-web/test/views/common/sort_view_test.js  |   49 +-
 .../resourceManager/wizard_view_test.js         |   11 +-
 .../stack_upgrade/upgrade_history_view_test.js  |  131 +-
 .../upgrade_version_box_view_test.js            |  352 +-
 .../upgrade_version_column_view_test.js         |  193 +
 .../stack_upgrade/upgrade_wizard_view_test.js   |   18 +-
 .../admin/stack_upgrade/version_view_test.js    |  133 +-
 .../views/main/alert_definitions_view_test.js   |   71 +-
 .../main/dashboard/config_history_view_test.js  |    1 +
 .../views/main/service/reassign_view_test.js    |   13 +-
 ambari-web/test/views/wizard/step1_view_test.js |   50 +-
 .../common-services/NIFI/1.0.0/kerberos.json    |    6 +-
 .../HDF/2.0/hooks/after-INSTALL/scripts/hook.py |    2 +-
 .../2.0/hooks/after-INSTALL/scripts/params.py   |    3 -
 .../scripts/shared_initialization.py            |   35 +-
 .../scripts/shared_initialization.py            |   14 -
 .../HDF/2.0/properties/stack_features.json      |    6 -
 .../stacks/HDF/2.0/services/KAFKA/kerberos.json |    6 +-
 .../MICROSOFT_R_SERVER/8.0.5/kerberos.json      |    6 +-
 .../2.0/hooks/after-INSTALL/scripts/hook.py     |    2 +-
 .../2.0/hooks/after-INSTALL/scripts/params.py   |    3 -
 .../scripts/shared_initialization.py            |   31 +-
 .../scripts/shared_initialization.py            |   14 -
 .../stacks/ODPi/2.0/services/HIVE/kerberos.json |   18 +-
 .../HIVE/package/scripts/hive_metastore.py      |    7 +-
 .../HIVE/package/scripts/hive_server_upgrade.py |   21 +-
 .../HIVE/package/scripts/hive_service.py        |    2 +-
 .../HIVE/package/scripts/params_linux.py        |   11 +-
 .../stacks/ODPi/2.0/services/YARN/kerberos.json |   30 +-
 .../YARN/package/scripts/params_linux.py        |    5 +-
 .../2.0/services/YARN/package/scripts/yarn.py   |   16 +-
 .../stacks/ODPi/2.0/services/stack_advisor.py   |    7 +
 contrib/version-builder/example.py              |    1 +
 contrib/version-builder/example.sh              |    3 +-
 contrib/version-builder/version_builder.py      |   17 +-
 .../view/commons/hdfs/FileOperationService.java |   41 +-
 .../resources/ui/app/components/file-search.js  |   10 +-
 .../main/resources/ui/app/controllers/files.js  |   20 +-
 .../src/main/resources/ui/app/routes/files.js   |   16 +-
 .../ui/app/templates/components/file-row.hbs    |    2 +-
 .../ui/app/templates/components/file-search.hbs |    2 +-
 .../main/resources/ui/app/templates/files.hbs   |    8 +-
 .../view/filebrowser/FilebrowserTest.java       |    4 +-
 .../ui/hive-web/app/adapters/application.js     |   18 +
 .../app/controllers/visualization-ui.js         |   10 +-
 .../ui/hive-web/app/utils/constants.js          |    1 +
 .../savedQueries/SavedQueryResourceManager.java |   17 +-
 .../resources/ui/app/components/job-item.js     |   49 +-
 .../resources/ui/app/components/jobs-browser.js |    3 +
 .../ui/app/components/query-result-table.js     |    2 +-
 .../src/main/resources/ui/app/routes/jobs.js    |   11 +
 .../main/resources/ui/app/routes/queries/new.js |    2 +
 .../resources/ui/app/routes/queries/query.js    |  240 +-
 .../src/main/resources/ui/app/services/jobs.js  |   31 +-
 .../src/main/resources/ui/app/styles/app.scss   |    8 +
 .../ui/app/templates/components/job-item.hbs    |    2 +-
 .../app/templates/components/jobs-browser.hbs   |    2 +-
 .../main/resources/ui/app/templates/jobs.hbs    |    1 +
 .../hive20/src/main/resources/ui/yarn.lock      |    2 +-
 .../ambari/view/utils/hdfs/DirListInfo.java     |   97 +
 .../ambari/view/utils/hdfs/DirStatus.java       |   75 +
 .../apache/ambari/view/utils/hdfs/HdfsApi.java  |  126 +-
 .../ambari/view/utils/hdfs/HdfsApiTest.java     |  201 +
 docs/pom.xml                                    |   14 +-
 docs/src/site/apt/index.apt                     |    2 +-
 docs/src/site/apt/whats-new.apt                 |    6 +-
 docs/src/site/site.xml                          |    2 +
 pom.xml                                         |    2 +-
 2265 files changed, 95092 insertions(+), 107760 deletions(-)
----------------------------------------------------------------------



[21/50] [abbrv] ambari git commit: AMBARI-22081. UI crashes during upgrade when the services array is not present (alexantonenko)

Posted by rl...@apache.org.
AMBARI-22081. UI crashes during upgrade when the services array is not present (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c7c62c09
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c7c62c09
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c7c62c09

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: c7c62c0974331790ad380799f2f59cd2d92ccd0a
Parents: b10b6fd
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Thu Sep 28 15:18:19 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Thu Sep 28 15:18:19 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/mappers/repository_version_mapper.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c7c62c09/ambari-web/app/mappers/repository_version_mapper.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/mappers/repository_version_mapper.js b/ambari-web/app/mappers/repository_version_mapper.js
index 61056ae..3838d34 100644
--- a/ambari-web/app/mappers/repository_version_mapper.js
+++ b/ambari-web/app/mappers/repository_version_mapper.js
@@ -122,7 +122,7 @@ App.repoVersionMapper = App.QuickDataMapper.create({
                 name: service.name,
                 display_name: service.display_name,
                 latest_version: service.versions[0] ? service.versions[0] : '',
-                is_available: item[repoVersionsKey].services.someProperty('name', service.name),
+                is_available: item[repoVersionsKey].services ? item[repoVersionsKey].services.someProperty( 'name', service.name) : true,
                 is_upgradable: json.stackServices ? json.stackServices[service.name] && json.stackServices[service.name].upgrade : true
               };
               resultService.push(serviceObj);
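
The guard makes a missing services array non-fatal: when the repository version payload carries no services at all, the service is simply treated as available. A minimal plain-JavaScript sketch of the same check, with illustrative names rather than Ambari's mapper internals (someProperty is Ember's helper; plain Array.prototype.some is used here):

    // Treat a service as available when the payload has no services array;
    // otherwise look the service up by name.
    function isServiceAvailable(repoVersion, serviceName) {
      var services = repoVersion.services;
      return services ? services.some(function (s) {
        return s.name === serviceName;
      }) : true;
    }

    isServiceAvailable({}, 'HDFS');                               // true
    isServiceAvailable({ services: [{ name: 'HDFS' }] }, 'YARN'); // false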


[25/50] [abbrv] ambari git commit: BUG-89063. Make dfs.permissions.superusergroup a group property (echekanskiy)

Posted by rl...@apache.org.
BUG-89063. Make dfs.permissions.superusergroup a group property (echekanskiy)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8e0f782e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8e0f782e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8e0f782e

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 8e0f782efd4694028b598106e68ebe2a1c7c0a2e
Parents: fb4115e
Author: Eugene Chekanskiy <ec...@apache.org>
Authored: Thu Sep 28 19:58:58 2017 +0300
Committer: Eugene Chekanskiy <ec...@apache.org>
Committed: Thu Sep 28 19:58:58 2017 +0300

----------------------------------------------------------------------
 .../common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml  | 4 ++++
 .../common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml   | 1 +
 .../common-services/HDFS/3.0.0.3.0/configuration/hadoop-env.xml  | 4 ++++
 .../common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml   | 1 +
 4 files changed, 10 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8e0f782e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
index 0f36e0b..660ab63 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-env.xml
@@ -198,6 +198,10 @@
           <type>cluster-env</type>
           <name>user_group</name>
         </property>
+        <property>
+          <type>hdfs-site</type>
+          <name>dfs.permissions.superusergroup</name>
+        </property>
       </user-groups>
     </value-attributes>
     <on-ambari-upgrade add="true"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8e0f782e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
index 4eab367..7fdc227 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
@@ -335,6 +335,7 @@
   <property>
     <name>dfs.permissions.superusergroup</name>
     <value>hdfs</value>
+    <property-type>GROUP</property-type>
     <description>The name of the group of super-users.</description>
     <on-ambari-upgrade add="true"/>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8e0f782e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hadoop-env.xml b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hadoop-env.xml
index 4154007..2ce3f84 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hadoop-env.xml
@@ -198,6 +198,10 @@
           <type>cluster-env</type>
           <name>user_group</name>
         </property>
+        <property>
+          <type>hdfs-site</type>
+          <name>dfs.permissions.superusergroup</name>
+        </property>
       </user-groups>
     </value-attributes>
     <on-ambari-upgrade add="false"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8e0f782e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml
index a4fed0f..5c28527 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml
@@ -332,6 +332,7 @@
   <property>
     <name>dfs.permissions.superusergroup</name>
     <value>hdfs</value>
+    <property-type>GROUP</property-type>
     <description>The name of the group of super-users.</description>
     <on-ambari-upgrade add="false"/>
   </property>


[14/50] [abbrv] ambari git commit: AMBARI-17551 AssignSlaves page doesn't let you choose NodeManager even though it complains that at least one should be added. (atkach)

Posted by rl...@apache.org.
AMBARI-17551 AssignSlaves page doesn't let you choose NodeManager even though it complains that at least one should be added. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/407eb543
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/407eb543
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/407eb543

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 407eb543577f460f319822504d8cc9558ae5aabc
Parents: 81354ff
Author: Andrii Tkach <at...@apache.org>
Authored: Wed Sep 27 13:21:17 2017 +0300
Committer: Andrii Tkach <at...@apache.org>
Committed: Wed Sep 27 16:47:23 2017 +0300

----------------------------------------------------------------------
 .../app/controllers/wizard/step6_controller.js       | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/407eb543/ambari-web/app/controllers/wizard/step6_controller.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/wizard/step6_controller.js b/ambari-web/app/controllers/wizard/step6_controller.js
index fb26dc2..68ec3ed 100644
--- a/ambari-web/app/controllers/wizard/step6_controller.js
+++ b/ambari-web/app/controllers/wizard/step6_controller.js
@@ -708,8 +708,6 @@ App.WizardStep6Controller = Em.Controller.extend(App.HostComponentValidationMixi
    * @override App.HostComponentRecommendationMixin
    */
   updateValidationsSuccessCallback: function (data) {
-    var self = this;
-
     var clientComponents = App.get('components.clients');
 
     this.set('generalErrorMessages', []);
@@ -729,7 +727,7 @@ App.WizardStep6Controller = Em.Controller.extend(App.HostComponentValidationMixi
     }).forEach(function (item) {
       var checkboxWithIssue = null;
       var isGeneralClientValidationItem = clientComponents.contains(item['component-name']); // it is an error/warning for any client component (under "CLIENT" alias)
-      var host = self.get('hosts').find(function (h) {
+      var host = this.get('hosts').find(function (h) {
         return h.hostName === item.host && h.checkboxes.some(function (checkbox) {
           var isClientComponent = checkbox.component === "CLIENT" && isGeneralClientValidationItem;
           if (checkbox.component === item['component-name'] || isClientComponent) {
@@ -753,6 +751,11 @@ App.WizardStep6Controller = Em.Controller.extend(App.HostComponentValidationMixi
           }
       }
       else {
+        var componentHeader = this.get('headers').findProperty('name', item['component-name']);
+        if (componentHeader && componentHeader.get('isDisabled')) {
+          // skip validation messages for components which disabled for editing
+          return;
+        }
         var component;
         if (isGeneralClientValidationItem) {
           if (!anyGeneralClientErrors) {
@@ -771,15 +774,15 @@ App.WizardStep6Controller = Em.Controller.extend(App.HostComponentValidationMixi
           }
 
           if (item.level === 'ERROR') {
-            self.get('generalErrorMessages').push(item.message + details);
+            this.get('generalErrorMessages').push(item.message + details);
           }
           else
             if (item.level === 'WARN') {
-              self.get('generalWarningMessages').push(item.message + details);
+              this.get('generalWarningMessages').push(item.message + details);
             }
         }
       }
-    });
+    }, this);
   },
 
   /**
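
Besides skipping validation messages for components whose headers are disabled for editing, the patch drops the var self = this alias in favor of the second argument of forEach, which binds this inside the callback. A standalone sketch of that pattern, with illustrative names:

    // forEach(callback, thisArg): inside the callback, 'this' is the value
    // passed as the second argument, so no self/that closure is needed.
    var validator = {
      generalErrorMessages: [],
      collect: function (items) {
        items.forEach(function (item) {
          if (item.level === 'ERROR') {
            this.generalErrorMessages.push(item.message); // 'this' is validator
          }
        }, this);
      }
    };

    validator.collect([{ level: 'ERROR', message: 'assign at least one NodeManager' }]);
    // validator.generalErrorMessages -> ['assign at least one NodeManager']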


[43/50] [abbrv] ambari git commit: AMBARI-22096. Entries related to a Hive query are found in the RM UI after killing or stopping the execution of the query. (Venkata Sairam)

Posted by rl...@apache.org.
AMBARI-22096. Entries related to a Hive query are found in the RM UI after killing or stopping the execution of the query. (Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a66e2deb
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a66e2deb
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a66e2deb

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: a66e2deba88aeaa33af868551b31128bcc2b4ce8
Parents: 7e6910f7
Author: Venkata Sairam <ve...@gmail.com>
Authored: Sat Sep 30 18:07:30 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Sat Sep 30 18:07:30 2017 +0530

----------------------------------------------------------------------
 .../resources/ui/app/routes/queries/query.js    | 13 ++++++--
 .../src/main/resources/ui/app/services/jobs.js  | 31 ++++++++++++++++++--
 2 files changed, 38 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a66e2deb/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 01e1497..3e5adc1 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -405,6 +405,7 @@ export default Ember.Route.extend(UILoggerMixin, {
         self.get('controller.model').set('currentJobData', data);
         self.get('controller.model').set('queryFile', data.job.queryFile);
         self.get('controller.model').set('logFile', data.job.logFile);
+        self.get('controller').set('currentJobId', data.job.id);
         self.get('controller.model').set('currentJobId', data.job.id);
         ctrlrModel.set('isJobCreated',true);
         ctrlr.set('isJobCreated',true);
@@ -442,9 +443,15 @@ export default Ember.Route.extend(UILoggerMixin, {
     },
 
     stopQuery(){
-      let jobId = this.get('controller.model').get('currentJobId');
-      this.get('jobs').stopJob(jobId)
-        .then( data => this.get('controller').set('isJobCancelled', true));
+      Ember.run.later(() => {
+        let jobId = this.get('controller').get('currentJobId'), self = this, ctrlr = self.get('controller'), ctrlrModel = self.get('controller.model');
+        this.get('jobs').stopJob(jobId)
+          .then( data => {
+             this.get('controller').set('isJobCancelled', true);
+          }).catch(function (response) {
+             self.get('controller').set('isJobCancelled', true);
+          });
+      }, 1000);
     },
 
     showVisualExplain(payloadTitle){

http://git-wip-us.apache.org/repos/asf/ambari/blob/a66e2deb/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
index 36abf49..dd9db00 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/services/jobs.js
@@ -20,6 +20,7 @@ import Ember from 'ember';
 
 export default Ember.Service.extend({
   store: Ember.inject.service(),
+  isCurrentQueryCancelled: false,
   getQuery(jobId) {
     let job = this.get('store').peekRecord('job', jobId);
     if (job) {
@@ -31,6 +32,11 @@ export default Ember.Service.extend({
 
     return new Ember.RSVP.Promise((resolve, reject) => {
       Ember.run.later(() => {
+        if(this.get('isCurrentQueryCancelled')) {
+         this.resetCurrentQueryStatus();
+         reject('error');
+         return;
+        }
         this.get('store').findRecord('job', jobId, {reload: true})
           .then((job) => {
             let status = job.get('status').toLowerCase();
@@ -64,10 +70,29 @@ export default Ember.Service.extend({
   },
 
   stopJob : function(jobId) {
-    return this.get('store').findRecord('job', jobId)
-      .then(job => job.destroyRecord());
+    this.setCurrentQueryAsCancelled();
+    return new Ember.RSVP.Promise((resolve, reject) => {
+      let job = this.get('store').peekRecord('job', jobId);
+      if(job) {
+       job.destroyRecord();
+      }
+       else {
+        this.get('store').findRecord('job', jobId, { reload: true })
+          .then(job => {
+           job.deleteRecord();
+           return resolve("");
+         }).catch(function (response) {
+           return resolve("");
+         });
+      }
+    });
+  },
+  setCurrentQueryAsCancelled() {
+    this.set('isCurrentQueryCancelled', true);
+  },
+  resetCurrentQueryStatus() {
+    this.set('isCurrentQueryCancelled', false);
   },
-
   _fetchDummyResult(jobId) {
     this.get('store').adapterFor('job').fetchResult(jobId);
   },
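
The new isCurrentQueryCancelled flag lets the status-polling loop notice a stop request before it schedules another fetch, rather than waiting for the job round-trip to fail. A minimal promise-based sketch of the idea, independent of Ember and the view's real API (all names are illustrative):

    // Poll a status function until it succeeds, but consult a shared
    // cancellation flag at the start of every round and reject instead
    // of scheduling another fetch once the flag is set.
    function pollUntilDone(fetchStatus, state, intervalMs) {
      return new Promise(function (resolve, reject) {
        function tick() {
          if (state.cancelled) {
            state.cancelled = false;        // reset for the next query
            reject(new Error('cancelled'));
            return;
          }
          fetchStatus().then(function (status) {
            if (status === 'succeeded') {
              resolve(status);
            } else {
              setTimeout(tick, intervalMs); // schedule the next round
            }
          }, reject);
        }
        setTimeout(tick, intervalMs);
      });
    }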


[04/50] [abbrv] ambari git commit: AMBARI-22056. Solr Data Manager script should use gzip compression type (mgergely)

Posted by rl...@apache.org.
AMBARI-22056. Solr Data Manager script should use gzip compression type (mgergely)

Change-Id: Ib61f1a03a885a2c81c11b32e5952c3c328a4064f


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a8736260
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a8736260
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a8736260

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: a8736260897c937bab1318ea4991d1edd1aae1eb
Parents: 7af3152
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Tue Sep 26 16:41:43 2017 +0200
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Tue Sep 26 16:41:59 2017 +0200

----------------------------------------------------------------------
 .../src/main/python/solrDataManager.py          | 35 +++++++++++++-------
 1 file changed, 23 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/a8736260/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py b/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
index 18a4da7..e0356bb 100644
--- a/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
+++ b/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
@@ -32,6 +32,8 @@ from subprocess import call, Popen, PIPE
 from urllib import quote, unquote
 from zipfile import ZipFile, ZIP_DEFLATED
 import tarfile
+import gzip
+import shutil
 
 VERSION = "1.0"
 
@@ -69,7 +71,7 @@ def parse_arguments():
   parser.add_option("-g", "--ignore-unfinished-uploading", dest="ignore_unfinished_uploading", action="store_true", default=False)
   
   parser.add_option("--json-file", dest="json_file", help="create a json file instead of line delimited json", action="store_true", default=False)
-  parser.add_option("-z", "--compression", dest="compression", help="none | tar.gz | tar.bz2 | zip", default="tar.gz")
+  parser.add_option("-z", "--compression", dest="compression", help="none | tar.gz | tar.bz2 | zip | gz", default="gz")
   
   parser.add_option("-k", "--solr-keytab", dest="solr_keytab", type="string", help="the keytab for a kerberized solr")
   parser.add_option("-n", "--solr-principal", dest="solr_principal", type="string", help="the principal for a kerberized solr")
@@ -122,7 +124,7 @@ def parse_arguments():
     parser.print_help()
     sys.exit()
 
-  compression_values = ["none", "tar.gz", "tar.bz2", "zip"]
+  compression_values = ["none", "tar.gz", "tar.bz2", "zip", "gz"]
   if options.compression not in compression_values:
     print "compression must be one of {0}".format(" | ".join(compression_values))
     parser.print_help()
@@ -469,35 +471,44 @@ def upload_block(solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url,
   os.remove("{0}/command.json".format(working_dir))
 
 def compress_file(working_dir, tmp_file_path, file_name, compression):
+  data_file_name = "{0}.json".format(file_name)
   if compression == "none":
     upload_file_path = "{0}/{1}.json".format(working_dir, file_name)
     os.rename(tmp_file_path, upload_file_path)
   elif compression == "tar.gz":
-    upload_file_path = "{0}/{1}.tar.gz".format(working_dir, file_name)
-    zipped_file_name = "{0}.json".format(file_name)
+    upload_file_path = "{0}/{1}.json.tar.gz".format(working_dir, file_name)
     tar = tarfile.open(upload_file_path, mode="w:gz")
     try:
-      tar.add(tmp_file_path, arcname=zipped_file_name)
+      tar.add(tmp_file_path, arcname=data_file_name)
     finally:
       tar.close()
   elif compression == "tar.bz2":
-    upload_file_path = "{0}/{1}.tar.bz2".format(working_dir, file_name)
-    zipped_file_name = "{0}.json".format(file_name)
+    upload_file_path = "{0}/{1}.json.tar.bz2".format(working_dir, file_name)
     tar = tarfile.open(upload_file_path, mode="w:bz2")
     try:
-      tar.add(tmp_file_path, arcname=zipped_file_name)
+      tar.add(tmp_file_path, arcname=data_file_name)
     finally:
       tar.close()
   elif compression == "zip":
-    upload_file_path = "{0}/{1}.zip".format(working_dir, file_name)
-    zipped_file_name = "{0}.json".format(file_name)
+    upload_file_path = "{0}/{1}.json.zip".format(working_dir, file_name)
     zip = ZipFile(upload_file_path, 'w')
-    zip.write(tmp_file_path, zipped_file_name, ZIP_DEFLATED)
-    logger.info("Created file %s", zipped_file_name)
+    zip.write(tmp_file_path, data_file_name, ZIP_DEFLATED)
+  elif compression == "gz":
+    upload_file_path = "{0}/{1}.json.gz".format(working_dir, file_name)
+    gz = gzip.open(upload_file_path, mode="wb")
+    f = open(tmp_file_path)
+    try:
+      shutil.copyfileobj(f, gz)
+    finally:
+      gz.close()
+      f.close()
   else:
     logger.warn("Unknown compression type")
     sys.exit()
   
+  logger.info("Created data file %s", data_file_name)
+
+  
   return upload_file_path
 
 def create_command_file(upload, working_dir, upload_file_path, solr_url, collection, filter_field, id_field, prev_lot_end_value,
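
The new gz branch streams the temporary file through gzip instead of wrapping it in a tar or zip container, so the upload is a single .json.gz stream that downstream tools can read directly. A standalone sketch of the same streaming copy, using only the standard library (paths are illustrative, not the script's real layout):

    # Stream a file through gzip without loading it into memory at once,
    # mirroring the gzip.open + shutil.copyfileobj pair the commit adds.
    import gzip
    import shutil

    def gzip_file(src_path, dst_path):
        with open(src_path, "rb") as src, gzip.open(dst_path, "wb") as dst:
            shutil.copyfileobj(src, dst)

    # gzip_file("/tmp/solr_lot.json", "/tmp/solr_lot.json.gz")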


[30/50] [abbrv] ambari git commit: AMBARI-22079. Upgrade Yarn version for Logsearch Web (Istvan Tobias via oleewere)

Posted by rl...@apache.org.
AMBARI-22079. Upgrade Yarn version for Logsearch Web (Istvan Tobias via oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/504094e5
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/504094e5
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/504094e5

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 504094e5bc697373b233683e0697de63872ce101
Parents: 8852edd
Author: Istvan Tobias <to...@gmail.com>
Authored: Fri Sep 29 15:50:20 2017 +0200
Committer: Oliver Szabo <ol...@gmail.com>
Committed: Fri Sep 29 15:50:20 2017 +0200

----------------------------------------------------------------------
 .../ambari-logsearch-web/package.json           |  17 +-
 ambari-logsearch/ambari-logsearch-web/pom.xml   |   6 +-
 ambari-logsearch/ambari-logsearch-web/yarn.lock | 288 ++++++++++++++-----
 3 files changed, 235 insertions(+), 76 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/504094e5/ambari-logsearch/ambari-logsearch-web/package.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/package.json b/ambari-logsearch/ambari-logsearch-web/package.json
index 7cf636f..f8525c4 100644
--- a/ambari-logsearch/ambari-logsearch-web/package.json
+++ b/ambari-logsearch/ambari-logsearch-web/package.json
@@ -1,7 +1,7 @@
 {
   "name": "ambari-logsearch-web",
   "version": "3.0.0",
-  "license": "Apache 2.0",
+  "license": "Apache-2.0",
   "scripts": {
     "ng": "ng",
     "start": "webpack-dev-server --port=4200",
@@ -22,7 +22,7 @@
     "@angular/platform-browser-dynamic": "^4.0.0",
     "@angular/router": "^4.0.0",
     "@ngrx/core": "^1.2.0",
-    "@ngrx/store": "^2.2.2",
+    "@ngrx/store": "^2.2.3",
     "@ngx-translate/core": "^6.0.1",
     "@ngx-translate/http-loader": "^0.0.3",
     "angular-moment-timezone": "^0.2.1",
@@ -36,12 +36,13 @@
     "moment-timezone": "^0.5.13",
     "ng2-auto-complete": "^0.12.0",
     "ngx-bootstrap": "^1.6.6",
-    "rxjs": "^5.1.0",
+    "rxjs": "^5.4.3",
     "zone.js": "^0.8.4"
   },
   "devDependencies": {
-    "@angular/cli": "^1.4.0",
+    "@angular/cli": "^1.4.3",
     "@angular/compiler-cli": "^4.0.0",
+    "@ngtools/webpack": "^1.7.1",
     "@types/d3": "^4.10.0",
     "@types/jasmine": "2.5.38",
     "@types/jquery": "^1.10.33",
@@ -57,7 +58,7 @@
     "cssnano": "^3.10.0",
     "exports-loader": "^0.6.3",
     "file-loader": "^0.10.0",
-    "html-webpack-plugin": "^2.29.0",
+    "html-webpack-plugin": "^2.30.1",
     "istanbul-instrumenter-loader": "^2.0.0",
     "jasmine-core": "~2.5.2",
     "jasmine-spec-reporter": "~3.2.0",
@@ -78,10 +79,10 @@
     "stylus-loader": "^3.0.1",
     "ts-node": "~2.0.0",
     "tslint": "~4.5.0",
-    "typescript": "~2.2.0",
+    "typescript": "~2.5.0",
     "url-loader": "^0.5.7",
-    "webpack": "~3.5.5",
+    "webpack": "~3.6.0",
     "webpack-concat-plugin": "1.4.0",
-    "webpack-dev-server": "~2.7.1"
+    "webpack-dev-server": "~2.9.0"
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/504094e5/ambari-logsearch/ambari-logsearch-web/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/pom.xml b/ambari-logsearch/ambari-logsearch-web/pom.xml
index f27c2c6..d641d5a 100644
--- a/ambari-logsearch/ambari-logsearch-web/pom.xml
+++ b/ambari-logsearch/ambari-logsearch-web/pom.xml
@@ -29,8 +29,8 @@
 
   <properties>
     <logsearch.npm.config.tmp>/tmp/logsearch_npm_config_tmp</logsearch.npm.config.tmp>
-    <node.version>v6.9.0</node.version>
-    <yarn.version>v0.23.3</yarn.version>
+    <node.version>v8.6.0</node.version>
+    <yarn.version>v1.1.0</yarn.version>
   </properties>
 
   <build>
@@ -38,7 +38,7 @@
       <plugin>
         <groupId>com.github.eirslett</groupId>
         <artifactId>frontend-maven-plugin</artifactId>
-        <version>1.4</version>
+        <version>1.6</version>
         <configuration>
           <nodeVersion>${node.version}</nodeVersion>
           <yarnVersion>${yarn.version}</yarnVersion>

http://git-wip-us.apache.org/repos/asf/ambari/blob/504094e5/ambari-logsearch/ambari-logsearch-web/yarn.lock
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/yarn.lock b/ambari-logsearch/ambari-logsearch-web/yarn.lock
index c55bb4d..c005503 100644
--- a/ambari-logsearch/ambari-logsearch-web/yarn.lock
+++ b/ambari-logsearch/ambari-logsearch-web/yarn.lock
@@ -23,14 +23,14 @@
     minimist "^1.2.0"
     rxjs "^5.4.2"
 
-"@angular/cli@^1.4.0":
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/@angular/cli/-/cli-1.4.0.tgz#c7b6f87c793e8147a4a4f5b472245b942be10556"
+"@angular/cli@^1.4.3":
+  version "1.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/cli/-/cli-1.4.3.tgz#8389d4eeadfe34abb1d16e53836416a8f8430fb3"
   dependencies:
     "@angular-devkit/build-optimizer" "~0.0.18"
     "@angular-devkit/schematics" "~0.0.21"
     "@ngtools/json-schema" "1.1.0"
-    "@ngtools/webpack" "1.7.0"
+    "@ngtools/webpack" "1.7.1"
     "@schematics/angular" "~0.0.30"
     autoprefixer "^6.5.3"
     chalk "^2.0.1"
@@ -87,48 +87,64 @@
     node-sass "^4.3.0"
 
 "@angular/common@^4.0.0":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@angular/common/-/common-4.1.3.tgz#e7c4791e32131cf74c239428c2a67daab2eef017"
+  version "4.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/common/-/common-4.4.3.tgz#f92ac68b02bec5f0e6d3603a843294dc96c96074"
+  dependencies:
+    tslib "^1.7.1"
 
 "@angular/compiler-cli@^4.0.0":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@angular/compiler-cli/-/compiler-cli-4.1.3.tgz#c2362ffdf65756471481f839fab675bcac213f96"
+  version "4.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/compiler-cli/-/compiler-cli-4.4.3.tgz#183af81f141186b8d660b06429592d40b7540a4a"
   dependencies:
-    "@angular/tsc-wrapped" "4.1.3"
+    "@angular/tsc-wrapped" "4.4.3"
     minimist "^1.2.0"
     reflect-metadata "^0.1.2"
 
 "@angular/compiler@^4.0.0":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@angular/compiler/-/compiler-4.1.3.tgz#d2dd30853b0cf4a54758b4a314632c231f9c94c3"
+  version "4.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/compiler/-/compiler-4.4.3.tgz#8f01163dad7db3408497d99d387554b6b185ad66"
+  dependencies:
+    tslib "^1.7.1"
 
 "@angular/core@^4.0.0":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@angular/core/-/core-4.1.3.tgz#285498eb86ab7d0b6f982f8f9f487ef610013b35"
+  version "4.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/core/-/core-4.4.3.tgz#e71d2b07beaacbab48ab7f51d4e2286ea5d70e15"
+  dependencies:
+    tslib "^1.7.1"
 
 "@angular/forms@^4.0.0":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@angular/forms/-/forms-4.1.3.tgz#380ab4c3af84c5d1d748c2a7d04151c7dc8e4982"
+  version "4.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/forms/-/forms-4.4.3.tgz#25b41bbab58bf1da872411c8517c10d7c5373d8e"
+  dependencies:
+    tslib "^1.7.1"
 
 "@angular/http@^4.0.0":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@angular/http/-/http-4.1.3.tgz#eb9d1c302a0172815f9a573310d9be0bdeb845ae"
+  version "4.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/http/-/http-4.4.3.tgz#b557ed24144aacc44b136cd477e84d2f57808903"
+  dependencies:
+    tslib "^1.7.1"
 
 "@angular/platform-browser-dynamic@^4.0.0":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@angular/platform-browser-dynamic/-/platform-browser-dynamic-4.1.3.tgz#3c13fdcf591d487f6efdc1d46913f280c6d8c2ec"
+  version "4.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/platform-browser-dynamic/-/platform-browser-dynamic-4.4.3.tgz#e41ddd8252432775310eab5940cdd8df0618f084"
+  dependencies:
+    tslib "^1.7.1"
 
 "@angular/platform-browser@^4.0.0":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@angular/platform-browser/-/platform-browser-4.1.3.tgz#4fa1db5119dd178b315ddae5b329bee1a932a5bd"
+  version "4.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/platform-browser/-/platform-browser-4.4.3.tgz#23f9a45bd3dc7f44d97877fbf8e6032decfc9dcb"
+  dependencies:
+    tslib "^1.7.1"
 
 "@angular/router@^4.0.0":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@angular/router/-/router-4.1.3.tgz#ddafd46ae7ccc8b1f74904ffb45f394e44625216"
+  version "4.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/router/-/router-4.4.3.tgz#26cc94775a3860946aeaf1c2e8f60f4d44e90991"
+  dependencies:
+    tslib "^1.7.1"
 
-"@angular/tsc-wrapped@4.1.3":
-  version "4.1.3"
-  resolved "https://registry.yarnpkg.com/@angular/tsc-wrapped/-/tsc-wrapped-4.1.3.tgz#2d6372c9187bf1621eacd960b94b39c4f95293cd"
+"@angular/tsc-wrapped@4.4.3":
+  version "4.4.3"
+  resolved "https://registry.yarnpkg.com/@angular/tsc-wrapped/-/tsc-wrapped-4.4.3.tgz#2d3f38210a1d4db03fc86dcf1e095812b85cd119"
   dependencies:
     tsickle "^0.21.0"
 
@@ -136,17 +152,17 @@
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/@ngrx/core/-/core-1.2.0.tgz#882b46abafa2e0e6d887cb71a1b2c2fa3e6d0dc6"
 
-"@ngrx/store@^2.2.2":
-  version "2.2.2"
-  resolved "https://registry.yarnpkg.com/@ngrx/store/-/store-2.2.2.tgz#a00305a6452032a3385886a11ce529dce2dae65b"
+"@ngrx/store@^2.2.3":
+  version "2.2.3"
+  resolved "https://registry.yarnpkg.com/@ngrx/store/-/store-2.2.3.tgz#e7bd1149f1c44208f1cc4744353f0f98a0f1f57b"
 
 "@ngtools/json-schema@1.1.0", "@ngtools/json-schema@^1.1.0":
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/@ngtools/json-schema/-/json-schema-1.1.0.tgz#c3a0c544d62392acc2813a42c8a0dc6f58f86922"
 
-"@ngtools/webpack@1.7.0":
-  version "1.7.0"
-  resolved "https://registry.yarnpkg.com/@ngtools/webpack/-/webpack-1.7.0.tgz#100b4ba370b3b9f991936f3d5db09cebffe11583"
+"@ngtools/webpack@1.7.1", "@ngtools/webpack@^1.7.1":
+  version "1.7.1"
+  resolved "https://registry.yarnpkg.com/@ngtools/webpack/-/webpack-1.7.1.tgz#383ddd689845cf42fc755975f6440f75535f5016"
   dependencies:
     enhanced-resolve "^3.1.0"
     loader-utils "^1.0.2"
@@ -307,8 +323,8 @@
     "@types/d3-selection" "*"
 
 "@types/d3@^4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@types/d3/-/d3-4.10.0.tgz#721d8f984ba4bc192b1c1aca3834b31cc31fa9d3"
+  version "4.10.1"
+  resolved "https://registry.yarnpkg.com/@types/d3/-/d3-4.10.1.tgz#a888ac8780ac241d770b2025b3d7e379c4d417f0"
   dependencies:
     "@types/d3-array" "*"
     "@types/d3-axis" "*"
@@ -354,8 +370,8 @@
   resolved "https://registry.yarnpkg.com/@types/jquery/-/jquery-1.10.33.tgz#6c4c279f8d06839b184950432e880e0a469a64a1"
 
 "@types/moment-timezone@^0.2.34":
-  version "0.2.34"
-  resolved "https://registry.yarnpkg.com/@types/moment-timezone/-/moment-timezone-0.2.34.tgz#948e0aff82742a31dd63714d1aac9616bc375053"
+  version "0.2.35"
+  resolved "https://registry.yarnpkg.com/@types/moment-timezone/-/moment-timezone-0.2.35.tgz#3fbbcb035e66aa5589a9198a6eec8d4dd9a701a5"
   dependencies:
     moment ">=2.14.0"
 
@@ -365,10 +381,14 @@
   dependencies:
     moment "*"
 
-"@types/node@^6.0.46", "@types/node@~6.0.60":
+"@types/node@^6.0.46":
   version "6.0.73"
   resolved "https://registry.yarnpkg.com/@types/node/-/node-6.0.73.tgz#85dc4bb6f125377c75ddd2519a1eeb63f0a4ed70"
 
+"@types/node@~6.0.60":
+  version "6.0.88"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-6.0.88.tgz#f618f11a944f6a18d92b5c472028728a3e3d4b66"
+
 "@types/q@^0.0.32":
   version "0.0.32"
   resolved "https://registry.yarnpkg.com/@types/q/-/q-0.0.32.tgz#bd284e57c84f1325da702babfc82a5328190c0c5"
@@ -488,12 +508,18 @@ angular-moment-timezone@^0.2.1:
     rxjs "^5.1.0"
     zone.js "^0.8.4"
 
-angular2-moment@^1.3.3, angular2-moment@^1.4.0:
+angular2-moment@^1.3.3:
   version "1.4.0"
   resolved "https://registry.yarnpkg.com/angular2-moment/-/angular2-moment-1.4.0.tgz#3d59c1ebc28934fcfe9b888ab461e261724987e8"
   dependencies:
     moment "^2.16.0"
 
+angular2-moment@^1.4.0:
+  version "1.7.0"
+  resolved "https://registry.yarnpkg.com/angular2-moment/-/angular2-moment-1.7.0.tgz#6e620430cc21efc65ed3a68b5c09f3f3ef7a8256"
+  dependencies:
+    moment "^2.16.0"
+
 ansi-align@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/ansi-align/-/ansi-align-2.0.0.tgz#c36aeccba563b89ceb556f3690f0b1d9e3547f7f"
@@ -578,6 +604,13 @@ array-flatten@^2.1.0:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-2.1.1.tgz#426bb9da84090c1838d812c8150af20a8331e296"
 
+array-includes@^3.0.3:
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.0.3.tgz#184b48f62d92d7452bb31b323165c7f8bd02266d"
+  dependencies:
+    define-properties "^1.1.2"
+    es-abstract "^1.7.0"
+
 array-slice@^0.2.3:
   version "0.2.3"
   resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-0.2.3.tgz#dd3cfb80ed7973a75117cdac69b0b99ec86186f5"
@@ -1388,10 +1421,14 @@ copy-webpack-plugin@^4.0.1:
     minimatch "^3.0.0"
     node-dir "^0.1.10"
 
-core-js@^2.2.0, core-js@^2.4.0, core-js@^2.4.1:
+core-js@^2.2.0, core-js@^2.4.0:
   version "2.4.1"
   resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.4.1.tgz#4de911e667b0eae9124e34254b53aea6fc618d3e"
 
+core-js@^2.4.1:
+  version "2.5.1"
+  resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.1.tgz#ae6874dc66937789b80754ff5428df66819ca50b"
+
 core-object@^3.1.0:
   version "3.1.5"
   resolved "https://registry.yarnpkg.com/core-object/-/core-object-3.1.5.tgz#fa627b87502adc98045e44678e9a8ec3b9c0d2a9"
@@ -1654,7 +1691,7 @@ d3-drag@1, d3-drag@1.1.1:
     d3-dispatch "1"
     d3-selection "1"
 
-d3-dsv@1, d3-dsv@1.0.5:
+d3-dsv@1:
   version "1.0.5"
   resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-1.0.5.tgz#419f7db47f628789fc3fdb636e678449d0821136"
   dependencies:
@@ -1662,6 +1699,14 @@ d3-dsv@1, d3-dsv@1.0.5:
     iconv-lite "0.4"
     rw "1"
 
+d3-dsv@1.0.7:
+  version "1.0.7"
+  resolved "https://registry.yarnpkg.com/d3-dsv/-/d3-dsv-1.0.7.tgz#137076663f398428fc3d031ae65370522492b78f"
+  dependencies:
+    commander "2"
+    iconv-lite "0.4"
+    rw "1"
+
 d3-ease@1, d3-ease@1.0.3:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/d3-ease/-/d3-ease-1.0.3.tgz#68bfbc349338a380c44d8acc4fbc3304aa2d8c0e"
@@ -1715,9 +1760,9 @@ d3-random@1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/d3-random/-/d3-random-1.1.0.tgz#6642e506c6fa3a648595d2b2469788a8d12529d3"
 
-d3-request@1.0.5:
-  version "1.0.5"
-  resolved "https://registry.yarnpkg.com/d3-request/-/d3-request-1.0.5.tgz#4daae946d1dd0d57dfe01f022956354958d51f23"
+d3-request@1.0.6:
+  version "1.0.6"
+  resolved "https://registry.yarnpkg.com/d3-request/-/d3-request-1.0.6.tgz#a1044a9ef4ec28c824171c9379fae6d79474b19f"
   dependencies:
     d3-collection "1"
     d3-dispatch "1"
@@ -1756,10 +1801,14 @@ d3-time@1, d3-time@1.0.7:
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/d3-time/-/d3-time-1.0.7.tgz#94caf6edbb7879bb809d0d1f7572bc48482f7270"
 
-d3-timer@1, d3-timer@1.0.6:
+d3-timer@1:
   version "1.0.6"
   resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-1.0.6.tgz#4044bf15d7025c06ce7d1149f73cd07b54dbd784"
 
+d3-timer@1.0.7:
+  version "1.0.7"
+  resolved "https://registry.yarnpkg.com/d3-timer/-/d3-timer-1.0.7.tgz#df9650ca587f6c96607ff4e60cc38229e8dd8531"
+
 d3-transition@1, d3-transition@1.1.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/d3-transition/-/d3-transition-1.1.0.tgz#cfc85c74e5239324290546623572990560c3966f"
@@ -1786,8 +1835,8 @@ d3-zoom@1.5.0:
     d3-transition "1"
 
 d3@^4.10.0:
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/d3/-/d3-4.10.0.tgz#0bcca3a3b614e2fd45b1b5bd0b9164d57352a862"
+  version "4.10.2"
+  resolved "https://registry.yarnpkg.com/d3/-/d3-4.10.2.tgz#d401b2bc0372a77e6822f278c0e4b4090206babd"
   dependencies:
     d3-array "1.2.0"
     d3-axis "1.0.8"
@@ -1797,7 +1846,7 @@ d3@^4.10.0:
     d3-color "1.0.3"
     d3-dispatch "1.0.3"
     d3-drag "1.1.1"
-    d3-dsv "1.0.5"
+    d3-dsv "1.0.7"
     d3-ease "1.0.3"
     d3-force "1.0.6"
     d3-format "1.2.0"
@@ -1809,13 +1858,13 @@ d3@^4.10.0:
     d3-quadtree "1.0.3"
     d3-queue "3.0.7"
     d3-random "1.1.0"
-    d3-request "1.0.5"
+    d3-request "1.0.6"
     d3-scale "1.0.6"
     d3-selection "1.1.0"
     d3-shape "1.2.0"
     d3-time "1.0.7"
     d3-time-format "2.0.5"
-    d3-timer "1.0.6"
+    d3-timer "1.0.7"
     d3-transition "1.1.0"
     d3-voronoi "1.1.2"
     d3-zoom "1.5.0"
@@ -1884,6 +1933,13 @@ default-require-extensions@^1.0.0:
   dependencies:
     strip-bom "^2.0.0"
 
+define-properties@^1.1.2:
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.2.tgz#83a73f2fea569898fb737193c8f873caf6d45c94"
+  dependencies:
+    foreach "^2.0.5"
+    object-keys "^1.0.8"
+
 defined@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693"
@@ -2166,6 +2222,24 @@ error-ex@^1.2.0:
   dependencies:
     is-arrayish "^0.2.1"
 
+es-abstract@^1.7.0:
+  version "1.8.2"
+  resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.8.2.tgz#25103263dc4decbda60e0c737ca32313518027ee"
+  dependencies:
+    es-to-primitive "^1.1.1"
+    function-bind "^1.1.1"
+    has "^1.0.1"
+    is-callable "^1.1.3"
+    is-regex "^1.0.4"
+
+es-to-primitive@^1.1.1:
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.1.1.tgz#45355248a88979034b6792e19bb81f2b7975dd0d"
+  dependencies:
+    is-callable "^1.1.1"
+    is-date-object "^1.0.1"
+    is-symbol "^1.0.1"
+
 es5-ext@^0.10.14, es5-ext@^0.10.9, es5-ext@~0.10.14:
   version "0.10.30"
   resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.30.tgz#7141a16836697dbabfaaaeee41495ce29f52c939"
@@ -2548,6 +2622,10 @@ for-own@^1.0.0:
   dependencies:
     for-in "^1.0.1"
 
+foreach@^2.0.5:
+  version "2.0.5"
+  resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99"
+
 forever-agent@~0.6.1:
   version "0.6.1"
   resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
@@ -2631,7 +2709,7 @@ fstream@^1.0.0, fstream@^1.0.10, fstream@^1.0.2:
     mkdirp ">=0.5 0"
     rimraf "2"
 
-function-bind@^1.0.2:
+function-bind@^1.0.2, function-bind@^1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
 
@@ -2950,7 +3028,7 @@ html-minifier@^3.2.3:
     relateurl "0.2.x"
     uglify-js "3.0.x"
 
-html-webpack-plugin@^2.29.0:
+html-webpack-plugin@^2.29.0, html-webpack-plugin@^2.30.1:
   version "2.30.1"
   resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-2.30.1.tgz#7f9c421b7ea91ec460f56527d78df484ee7537d5"
   dependencies:
@@ -3101,7 +3179,7 @@ ini@^1.3.4, ini@~1.3.0:
   version "1.3.4"
   resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e"
 
-internal-ip@^1.2.0:
+internal-ip@1.2.0, internal-ip@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/internal-ip/-/internal-ip-1.2.0.tgz#ae9fbf93b984878785d50a8de1b356956058cf5c"
   dependencies:
@@ -3153,6 +3231,14 @@ is-builtin-module@^1.0.0:
   dependencies:
     builtin-modules "^1.0.0"
 
+is-callable@^1.1.1, is-callable@^1.1.3:
+  version "1.1.3"
+  resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.3.tgz#86eb75392805ddc33af71c92a0eedf74ee7604b2"
+
+is-date-object@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.1.tgz#9aa20eb6aeebbff77fbd33e74ca01b33581d3a16"
+
 is-directory@^0.3.1:
   version "0.3.1"
   resolved "https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1"
@@ -3263,6 +3349,12 @@ is-redirect@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/is-redirect/-/is-redirect-1.0.0.tgz#1d03dded53bd8db0f30c26e4f95d36fc7c87dc24"
 
+is-regex@^1.0.4:
+  version "1.0.4"
+  resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.4.tgz#5517489b547091b0930e095654ced25ee97e9491"
+  dependencies:
+    has "^1.0.1"
+
 is-retry-allowed@^1.0.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.1.0.tgz#11a060568b67339444033d0125a61a20d564fb34"
@@ -3277,6 +3369,10 @@ is-svg@^2.0.0:
   dependencies:
     html-comment-regex "^1.1.0"
 
+is-symbol@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.1.tgz#3cc59f00025194b6ab2e38dbae6689256b660572"
+
 is-typedarray@~1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
@@ -4011,7 +4107,7 @@ moment-timezone@^0.5.13:
   dependencies:
     moment ">= 2.9.0"
 
-moment@*, moment@2.18.1, "moment@>= 2.9.0", moment@>=2.14.0, moment@^2.16.0, moment@^2.18.1:
+moment@*, "moment@>= 2.9.0", moment@>=2.14.0, moment@^2.16.0, moment@^2.18.1:
   version "2.18.1"
   resolved "https://registry.yarnpkg.com/moment/-/moment-2.18.1.tgz#c36193dd3ce1c2eed2adb7c802dbbc77a81b1c0f"
 
@@ -4061,10 +4157,8 @@ ng2-auto-complete@^0.12.0:
   resolved "https://registry.yarnpkg.com/ng2-auto-complete/-/ng2-auto-complete-0.12.0.tgz#9a78c39c5012404e7bc8365c03815ab7f68cea3d"
 
 ngx-bootstrap@^1.6.6:
-  version "1.6.6"
-  resolved "https://registry.yarnpkg.com/ngx-bootstrap/-/ngx-bootstrap-1.6.6.tgz#0057141cfbdd7e8a50e81bda735fad8e95acb0dd"
-  dependencies:
-    moment "2.18.1"
+  version "1.9.3"
+  resolved "https://registry.yarnpkg.com/ngx-bootstrap/-/ngx-bootstrap-1.9.3.tgz#28e75d14fb1beaee609383d7694de4eb3ba03b26"
 
 no-case@^2.2.0:
   version "2.3.1"
@@ -4270,6 +4364,10 @@ object-component@0.0.3:
   version "0.0.3"
   resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291"
 
+object-keys@^1.0.8:
+  version "1.0.11"
+  resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d"
+
 object.omit@^2.0.0:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa"
@@ -4304,7 +4402,7 @@ opn@4.0.2:
     object-assign "^4.0.1"
     pinkie-promise "^2.0.0"
 
-opn@~5.1.0:
+opn@^5.1.0, opn@~5.1.0:
   version "5.1.0"
   resolved "https://registry.yarnpkg.com/opn/-/opn-5.1.0.tgz#72ce2306a17dbea58ff1041853352b4a8fc77519"
   dependencies:
@@ -5288,7 +5386,7 @@ rxjs@^5.1.0:
   dependencies:
     symbol-observable "^1.0.1"
 
-rxjs@^5.4.2:
+rxjs@^5.4.2, rxjs@^5.4.3:
   version "5.4.3"
   resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-5.4.3.tgz#0758cddee6033d68e0fd53676f0f3596ce3d483f"
   dependencies:
@@ -5976,6 +6074,10 @@ tsickle@^0.21.0:
     source-map "^0.5.6"
     source-map-support "^0.4.2"
 
+tslib@^1.7.1:
+  version "1.7.1"
+  resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.7.1.tgz#bc8004164691923a79fe8378bbeb3da2017538ec"
+
 tslint@~4.5.0:
   version "4.5.1"
   resolved "https://registry.yarnpkg.com/tslint/-/tslint-4.5.1.tgz#05356871bef23a434906734006fc188336ba824b"
@@ -6023,9 +6125,9 @@ typedarray@^0.0.6:
   version "2.5.2"
   resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.5.2.tgz#038a95f7d9bbb420b1bf35ba31d4c5c1dd3ffe34"
 
-typescript@~2.2.0:
-  version "2.2.2"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.2.2.tgz#606022508479b55ffa368b58fee963a03dfd7b0c"
+typescript@~2.5.0:
+  version "2.5.3"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.5.3.tgz#df3dcdc38f3beb800d4bc322646b04a3f6ca7f0d"
 
 uglify-js@3.0.x:
   version "3.0.28"
@@ -6327,6 +6429,35 @@ webpack-dev-server@~2.7.1:
     webpack-dev-middleware "^1.11.0"
     yargs "^6.0.0"
 
+webpack-dev-server@~2.9.0:
+  version "2.9.1"
+  resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-2.9.1.tgz#7ac9320b61b00eb65b2109f15c82747fc5b93585"
+  dependencies:
+    ansi-html "0.0.7"
+    array-includes "^3.0.3"
+    bonjour "^3.5.0"
+    chokidar "^1.6.0"
+    compression "^1.5.2"
+    connect-history-api-fallback "^1.3.0"
+    del "^3.0.0"
+    express "^4.13.3"
+    html-entities "^1.2.0"
+    http-proxy-middleware "~0.17.4"
+    internal-ip "1.2.0"
+    ip "^1.1.5"
+    loglevel "^1.4.1"
+    opn "^5.1.0"
+    portfinder "^1.0.9"
+    selfsigned "^1.9.1"
+    serve-index "^1.7.2"
+    sockjs "0.3.18"
+    sockjs-client "1.1.4"
+    spdy "^3.4.1"
+    strip-ansi "^3.0.1"
+    supports-color "^4.2.1"
+    webpack-dev-middleware "^1.11.0"
+    yargs "^6.6.0"
+
 webpack-merge@^4.1.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-4.1.0.tgz#6ad72223b3e0b837e531e4597c199f909361511e"
@@ -6367,6 +6498,33 @@ webpack@~3.5.5:
     webpack-sources "^1.0.1"
     yargs "^8.0.2"
 
+webpack@~3.6.0:
+  version "3.6.0"
+  resolved "https://registry.yarnpkg.com/webpack/-/webpack-3.6.0.tgz#a89a929fbee205d35a4fa2cc487be9cbec8898bc"
+  dependencies:
+    acorn "^5.0.0"
+    acorn-dynamic-import "^2.0.0"
+    ajv "^5.1.5"
+    ajv-keywords "^2.0.0"
+    async "^2.1.2"
+    enhanced-resolve "^3.4.0"
+    escope "^3.6.0"
+    interpret "^1.0.0"
+    json-loader "^0.5.4"
+    json5 "^0.5.1"
+    loader-runner "^2.3.0"
+    loader-utils "^1.1.0"
+    memory-fs "~0.4.1"
+    mkdirp "~0.5.0"
+    node-libs-browser "^2.0.0"
+    source-map "^0.5.3"
+    supports-color "^4.2.1"
+    tapable "^0.2.7"
+    uglifyjs-webpack-plugin "^0.4.6"
+    watchpack "^1.4.0"
+    webpack-sources "^1.0.1"
+    yargs "^8.0.2"
+
 websocket-driver@>=0.5.1:
   version "0.6.5"
   resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.6.5.tgz#5cb2556ceb85f4373c6d8238aa691c8454e13a36"
@@ -6529,7 +6687,7 @@ yargs-parser@^7.0.0:
   dependencies:
     camelcase "^4.1.0"
 
-yargs@^6.0.0:
+yargs@^6.0.0, yargs@^6.6.0:
   version "6.6.0"
   resolved "https://registry.yarnpkg.com/yargs/-/yargs-6.6.0.tgz#782ec21ef403345f830a808ca3d513af56065208"
   dependencies:
@@ -6613,5 +6771,5 @@ zone.js@^0.8.14:
   resolved "https://registry.yarnpkg.com/zone.js/-/zone.js-0.8.17.tgz#4c5e5185a857da8da793daf3919371c5a36b2a0b"
 
 zone.js@^0.8.4:
-  version "0.8.11"
-  resolved "https://registry.yarnpkg.com/zone.js/-/zone.js-0.8.11.tgz#742befb17fbc49a571712b8c7d87e58ca26fd886"
+  version "0.8.18"
+  resolved "https://registry.yarnpkg.com/zone.js/-/zone.js-0.8.18.tgz#8cecb3977fcd1b3090562ff4570e2847e752b48d"


[34/50] [abbrv] ambari git commit: AMBARI-22098. UI Change: Rename 'Rerun Checks' button to 'Rerun Pre-Upgrade Checks'

Posted by rl...@apache.org.
AMBARI-22098. UI Change: Rename 'Rerun Checks' button to 'Rerun Pre-Upgrade Checks'


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/510a2620
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/510a2620
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/510a2620

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 510a2620db12e47741e7a00022d66d6bceb283b5
Parents: 045fc07
Author: Alex Antonenko <aa...@hortonworks.com>
Authored: Fri Sep 29 21:18:00 2017 +0300
Committer: Alex Antonenko <aa...@hortonworks.com>
Committed: Fri Sep 29 21:18:00 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/messages.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/510a2620/ambari-web/app/messages.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/messages.js b/ambari-web/app/messages.js
index db25104..d0c7e39 100644
--- a/ambari-web/app/messages.js
+++ b/ambari-web/app/messages.js
@@ -1782,7 +1782,7 @@ Em.I18n.translations = {
   'admin.stackVersions.version.upgrade.upgradeOptions.HOU.title': "Host Ordered",
   'admin.stackVersions.version.upgrade.upgradeOptions.EU.description': "Services are stopped while the upgrade is performed. Incurs downtime, but faster upgrade.",
   'admin.stackVersions.version.upgrade.upgradeOptions.errors_bypassed': "Bypassed errors,<br/>proceed at your own risk.",
-  'admin.stackVersions.version.upgrade.upgradeOptions.preCheck.rerun':'Rerun Checks',
+  'admin.stackVersions.version.upgrade.upgradeOptions.preCheck.rerun':'Rerun Pre-Upgrade Checks',
   'admin.stackVersions.version.upgrade.upgradeOptions.preCheck.msg.title':'Checks:',
   'admin.stackVersions.version.upgrade.upgradeOptions.preCheck.msg.checking': 'Checking...',
   'admin.stackVersions.version.upgrade.upgradeOptions.preCheck.msg.failed.title': 'Check failed',


[03/50] [abbrv] ambari git commit: AMBARI-22058 Dashboard heatmaps tab loading stuck (dsen)

Posted by rl...@apache.org.
AMBARI-22058 Dashboard heatmaps tab loading stuck (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7af3152c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7af3152c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7af3152c

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 7af3152c5baf440bfc6d48f783e7133b41d434f9
Parents: fe7a8e4
Author: Dmytro Sen <ds...@apache.org>
Authored: Tue Sep 26 17:13:55 2017 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Tue Sep 26 17:13:55 2017 +0300

----------------------------------------------------------------------
 .../internal/ServiceResourceProvider.java       |  8 ++
 .../AmbariManagementControllerTest.java         | 11 +++
 .../resources/stacks/OTHER/1.0/widgets.json     | 95 ++++++++++++++++++++
 3 files changed, 114 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7af3152c/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
index e65693b..76a4547 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java
@@ -423,6 +423,8 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
     // do all validation checks
     validateCreateRequests(requests, clusters);
 
+    Set<Cluster> clustersSetFromRequests = new HashSet<>();
+
     for (ServiceRequest request : requests) {
       Cluster cluster = clusters.getCluster(request.getClusterName());
 
@@ -478,6 +480,12 @@ public class ServiceResourceProvider extends AbstractControllerResourceProvider
 
       // Initialize service widgets
       getManagementController().initializeWidgetsAndLayouts(cluster, s);
+      clustersSetFromRequests.add(cluster);
+    }
+
+    // Create cluster widgets and layouts
+    for (Cluster cluster : clustersSetFromRequests) {
+      getManagementController().initializeWidgetsAndLayouts(cluster, null);
     }
   }
 
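The fix above gathers each request's cluster into a set so that the cluster-scoped widget initialization (the call with a null service) runs once per cluster rather than once per service request. A rough sketch of that control flow (Python, with placeholder names -- the committed code is the Java above):

  class Controller(object):
      def initialize_widgets_and_layouts(self, cluster, service):
          scope = "cluster" if service is None else "service %s" % service
          print("initializing %s widgets for %s" % (scope, cluster))

  def create_services(requests, controller):
      clusters_from_requests = set()
      for request in requests:
          cluster = request["cluster"]
          # ... per-service creation work elided ...
          controller.initialize_widgets_and_layouts(cluster, request["service"])
          clusters_from_requests.add(cluster)
      # cluster-scoped widgets/layouts are created exactly once per cluster
      for cluster in clusters_from_requests:
          controller.initialize_widgets_and_layouts(cluster, None)

  create_services([{"cluster": "c1", "service": "HDFS"},
                   {"cluster": "c1", "service": "YARN"}], Controller())
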

http://git-wip-us.apache.org/repos/asf/ambari/blob/7af3152c/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
index 07ba786..b370829 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/AmbariManagementControllerTest.java
@@ -10423,6 +10423,17 @@ public class AmbariManagementControllerTest {
     Assert.assertEquals("FILES_LOCAL", layoutUserWidgetEntities.get(2).getWidget().getWidgetName());
     Assert.assertEquals("UPDATED_BLOCKED_TIME", layoutUserWidgetEntities.get(3).getWidget().getWidgetName());
     Assert.assertEquals("HBASE_SUMMARY", layoutUserWidgetEntities.get(0).getWidget().getDefaultSectionName());
+
+    candidateLayoutEntity = null;
+    for (WidgetLayoutEntity entity : layoutEntities) {
+      if (entity.getLayoutName().equals("default_system_heatmap")) {
+        candidateLayoutEntity = entity;
+        break;
+      }
+    }
+    Assert.assertNotNull(candidateLayoutEntity);
+    Assert.assertEquals("ambari", candidateVisibleEntity.getAuthor());
+    Assert.assertEquals("CLUSTER", candidateVisibleEntity.getScope());
   }
 
   // this is a temporary measure as a result of moving updateHostComponents from AmbariManagementController

http://git-wip-us.apache.org/repos/asf/ambari/blob/7af3152c/ambari-server/src/test/resources/stacks/OTHER/1.0/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/stacks/OTHER/1.0/widgets.json b/ambari-server/src/test/resources/stacks/OTHER/1.0/widgets.json
new file mode 100644
index 0000000..3176354
--- /dev/null
+++ b/ambari-server/src/test/resources/stacks/OTHER/1.0/widgets.json
@@ -0,0 +1,95 @@
+{
+  "layouts": [
+    {
+      "layout_name": "default_system_heatmap",
+      "display_name": "Heatmaps",
+      "section_name": "SYSTEM_HEATMAPS",
+      "widgetLayoutInfo": [
+        {
+          "widget_name": "Host Disk Space Used %",
+          "description": "",
+          "widget_type": "HEATMAP",
+          "is_visible": true,
+          "metrics": [
+            {
+              "name": "disk_free",
+              "metric_path": "metrics/disk/disk_free",
+              "service_name": "STACK"
+            },
+            {
+              "name": "disk_total",
+              "metric_path": "metrics/disk/disk_total",
+              "service_name": "STACK"
+            }
+          ],
+          "values": [
+            {
+              "name": "Host Disk Space Used %",
+              "value": "${((disk_total-disk_free)/disk_total)*100}"
+            }
+          ],
+          "properties": {
+            "display_unit": "%",
+            "max_limit": "100"
+          }
+        },
+        {
+          "widget_name": "Host Memory Used %",
+          "description": "",
+          "widget_type": "HEATMAP",
+          "is_visible": false,
+          "metrics": [
+            {
+              "name": "mem_total",
+              "metric_path": "metrics/memory/mem_total",
+              "service_name": "STACK"
+            },
+            {
+              "name": "mem_free",
+              "metric_path": "metrics/memory/mem_free",
+              "service_name": "STACK"
+            },
+            {
+              "name": "mem_cached",
+              "metric_path": "metrics/memory/mem_cached",
+              "service_name": "STACK"
+            }
+          ],
+          "values": [
+            {
+              "name": "Host Memory Used %",
+              "value": "${((mem_total-mem_free-mem_cached)/mem_total)*100}"
+            }
+          ],
+          "properties": {
+            "display_unit": "%",
+            "max_limit": "100"
+          }
+        },
+        {
+          "widget_name": "Host CPU Wait IO %",
+          "description": "",
+          "widget_type": "HEATMAP",
+          "is_visible": false,
+          "metrics": [
+            {
+              "name": "cpu_wio",
+              "metric_path": "metrics/cpu/cpu_wio",
+              "service_name": "STACK"
+            }
+          ],
+          "values": [
+            {
+              "name": "Host Memory Used %",
+              "value": "${cpu_wio*100}"
+            }
+          ],
+          "properties": {
+            "display_unit": "%",
+            "max_limit": "100"
+          }
+        }
+      ]
+    }
+  ]
+}

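Each heatmap widget combines its metrics through a value template such as ${((disk_total-disk_free)/disk_total)*100}. Ambari evaluates these server-side; the sketch below (illustrative only, not Ambari code) just shows the substitute-then-evaluate semantics of such a template:

  import re

  metrics = {"disk_total": 500.0, "disk_free": 125.0}

  expression = "${((disk_total-disk_free)/disk_total)*100}"
  body = expression[2:-1]
  # substitute each metric name with its per-host value, then evaluate
  for name, value in metrics.items():
      body = re.sub(r"\b%s\b" % name, str(value), body)
  print(eval(body))  # 75.0 -- rendered as "75 %" with max_limit 100
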

[17/50] [abbrv] ambari git commit: AMBARI-22076 - Use A Scoped Repo When Installing stack-tools (jonathanhurley)

Posted by rl...@apache.org.
AMBARI-22076 - Use A Scoped Repo When Installing stack-tools (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4da28dff
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4da28dff
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4da28dff

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 4da28dffffa7b2fe362d52de34ec3f0ec47bfb30
Parents: 40e712a
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Wed Sep 27 11:52:11 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Wed Sep 27 14:04:40 2017 -0400

----------------------------------------------------------------------
 .../core/providers/package/yumrpm.py             |  2 +-
 .../custom_actions/scripts/install_packages.py   | 19 ++++++++++++++-----
 .../python/custom_actions/TestInstallPackages.py | 14 +++++++-------
 3 files changed, 22 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/4da28dff/ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py b/ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py
index 78c77ca..a4a55da 100644
--- a/ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py
+++ b/ambari-common/src/main/python/resource_management/core/providers/package/yumrpm.py
@@ -216,7 +216,7 @@ class YumProvider(RPMBasedPackageProvider):
       cmd = INSTALL_CMD[self.get_logoutput()]
       if use_repos:
         enable_repo_option = '--enablerepo=' + ",".join(use_repos)
-        disable_repo_option = '--disablerepo=' + "*,".join(skip_repos)
+        disable_repo_option = '--disablerepo=' + ("*" if len(skip_repos) == 0 else ','.join(skip_repos))
         cmd = cmd + [disable_repo_option, enable_repo_option]
       cmd = cmd + [name]
       Logger.info("Installing package %s ('%s')" % (name, string_cmd_from_args_list(cmd)))

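With the fix, yum is invoked with every repository disabled and only the scoped ones re-enabled, e.g. --disablerepo=* --enablerepo=HDP-UTILS-1.1.0.20,HDP-2.2. A small sketch of the option construction (sample repository names, not the committed code); note the parentheses around the conditional expression, since string concatenation binds tighter than if/else:

  skip_repos = []
  use_repos = ["HDP-UTILS-1.1.0.20", "HDP-2.2"]

  enable_repo_option = "--enablerepo=" + ",".join(use_repos)
  # without the parentheses, a non-empty skip_repos would silently drop
  # the "--disablerepo=" prefix from the option
  disable_repo_option = "--disablerepo=" + ("*" if len(skip_repos) == 0 else ",".join(skip_repos))

  cmd = ["yum", "-y", "install", disable_repo_option, enable_repo_option, "hdp-select"]
  print(" ".join(cmd))
  # yum -y install --disablerepo=* --enablerepo=HDP-UTILS-1.1.0.20,HDP-2.2 hdp-select
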
http://git-wip-us.apache.org/repos/asf/ambari/blob/4da28dff/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
index 6a80a43..dcf3544 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
@@ -316,19 +316,28 @@ class InstallPackages(Script):
     packages_were_checked = False
     packages_installed_before = []
     stack_selector_package = stack_tools.get_stack_tool_package(stack_tools.STACK_SELECTOR_NAME)
+
     try:
+      # install the stack-selector; we need to supply the action as "upgrade" here since the normal
+      # install command will skip if the package is already installed in the system.
+      # This is required for non-versioned components, like stack-select, since each version of
+      # the stack comes with one. Also, scope the install by repository since we need to pick the
+      # specific repo that the stack-select tools come from in case there are multiple
+      # patches installed.
+      repositories = config['repositoryFile']['repositories']
+      repository_ids = [repository['repoId'] for repository in repositories]
       Package(stack_selector_package,
-              action="upgrade",
-              retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
-              retry_count=agent_stack_retry_count
-      )
+        action="upgrade",
+        use_repos=repository_ids,
+        retry_on_repo_unavailability=agent_stack_retry_on_unavailability,
+        retry_count=agent_stack_retry_count)
       
       packages_installed_before = self.pkg_provider.all_installed_packages()
       packages_installed_before = [package[0] for package in packages_installed_before]
       packages_were_checked = True
       filtered_package_list = self.filter_package_list(package_list)
       try:
-        available_packages_in_repos = self.pkg_provider.get_available_packages_in_repos(config['repositoryFile']['repositories'])
+        available_packages_in_repos = self.pkg_provider.get_available_packages_in_repos(repositories)
       except Exception:
         available_packages_in_repos = []
       for package in filtered_package_list:

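The repository ids come straight out of the command's repositoryFile structure. A minimal sketch of the extraction against a hand-built config (its shape is assumed from the diff above):

  config = {
      "repositoryFile": {
          "repositories": [
              {"repoId": "HDP-2.2-repo-4"},
              {"repoId": "HDP-UTILS-1.1.0.20-repo-4"},
          ]
      }
  }

  repositories = config["repositoryFile"]["repositories"]
  repository_ids = [repository["repoId"] for repository in repositories]
  print(repository_ids)
  # ['HDP-2.2-repo-4', 'HDP-UTILS-1.1.0.20-repo-4'] -- passed to Package(...)
  # as use_repos so hdp-select is pulled only from the current version's repos
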
http://git-wip-us.apache.org/repos/asf/ambari/blob/4da28dff/ambari-server/src/test/python/custom_actions/TestInstallPackages.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/custom_actions/TestInstallPackages.py b/ambari-server/src/test/python/custom_actions/TestInstallPackages.py
index a946fdd..0f303be 100644
--- a/ambari-server/src/test/python/custom_actions/TestInstallPackages.py
+++ b/ambari-server/src/test/python/custom_actions/TestInstallPackages.py
@@ -139,7 +139,7 @@ class TestInstallPackages(RMFTestCase):
                                 mirror_list=None,
                                 append_to_file=True,
       )
-      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
+      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], use_repos=['HDP-UTILS-1.1.0.20', 'HDP-2.2'], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'hadoop_2_2_0_1_885', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy-devel', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
@@ -190,7 +190,7 @@ class TestInstallPackages(RMFTestCase):
                          'repository_version_id': 1,
                          'actual_version': VERSION_STUB})
 
-      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
+      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], use_repos=[], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', None, action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy-devel', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
@@ -254,7 +254,7 @@ class TestInstallPackages(RMFTestCase):
                                 mirror_list=None,
                                 append_to_file=True,
                                 )
-      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
+      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], use_repos=['HDP-UTILS-1.1.0.20', 'HDP-2.2'], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'hadoop_2_2_0_1_885', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy-devel', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
@@ -319,7 +319,7 @@ class TestInstallPackages(RMFTestCase):
                                 mirror_list=None,
                                 append_to_file=True,
       )
-      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
+      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], use_repos=['HDP-UTILS-1.1.0.20', 'HDP-2.2'], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'hadoop_2_2_0_1_885', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy-devel', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
@@ -464,7 +464,7 @@ class TestInstallPackages(RMFTestCase):
                                 mirror_list=None,
                                 append_to_file=True,
                                 )
-      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
+      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], use_repos=['HDP-UTILS-1.1.0.20', 'HDP-2.2'], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'hadoop_2_2_0_1_885', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy-devel', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
@@ -539,7 +539,7 @@ class TestInstallPackages(RMFTestCase):
                                 mirror_list=None,
                                 append_to_file=True,
                                 )
-      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
+      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], use_repos=['HDP-UTILS-1.1.0.20-repo-4', 'HDP-2.2-repo-4'], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'hadoop_2_2_0_1_885', action=["upgrade"], retry_count=5,
                                 retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
@@ -1177,7 +1177,7 @@ class TestInstallPackages(RMFTestCase):
                                 mirror_list=None,
                                 append_to_file=True,
       )
-      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)
+      self.assertResourceCalled('Package', 'hdp-select', action=["upgrade"], use_repos=['HDP-UTILS-1.1.0.20-repo-4', 'HDP-2.2-repo-4'], retry_count=5, retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'hadoop_2_2_0_1_885', action=["upgrade"], retry_count=5,
                                 retry_on_repo_unavailability=False)
       self.assertResourceCalled('Package', 'snappy', action=["upgrade"], retry_count=5, retry_on_repo_unavailability=False)


[09/50] [abbrv] ambari git commit: AMBARI-22061. Solr Data Manager script should provide non-destructive archive download option (mgergely)

Posted by rl...@apache.org.
AMBARI-22061. Solr Data Manager script should provide non-destructive archive download option (mgergely)

Change-Id: I2fd9ef9e2de840e25649feb5f39b95dd36e7cc7e


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9d802b7c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9d802b7c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9d802b7c

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 9d802b7c11a62336fd4de8aa2695af02b061c625
Parents: dc419b4
Author: Miklos Gergely <mg...@hortonworks.com>
Authored: Wed Sep 27 11:15:01 2017 +0200
Committer: Miklos Gergely <mg...@hortonworks.com>
Committed: Wed Sep 27 11:15:11 2017 +0200

----------------------------------------------------------------------
 .../src/main/python/solrDataManager.py          | 76 +++++++++++---------
 1 file changed, 43 insertions(+), 33 deletions(-)
----------------------------------------------------------------------

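The change splits the previously destructive save into three modes: delete removes documents without saving them, archive uploads a block and then deletes what was uploaded, and the new save only uploads, leaving Solr untouched. A condensed sketch of the resulting dispatch (the stub functions stand in for the script's upload and delete steps and are not its real API):

  def upload_block(block):
      print("uploading %s" % block)

  def delete_from_solr(block):
      print("deleting %s from solr" % block)

  def process(mode, block):
      if mode == "delete":           # remove documents only, save nothing
          delete_from_solr(block)
          return
      upload_block(block)            # both "save" and "archive" upload
      if mode == "archive":          # only "archive" is destructive
          delete_from_solr(block)

  process("save", "block-1")     # non-destructive download
  process("archive", "block-1")  # download, then delete from Solr
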

http://git-wip-us.apache.org/repos/asf/ambari/blob/9d802b7c/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py b/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
index e0356bb..2675bd9 100644
--- a/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
+++ b/ambari-infra/ambari-infra-solr-client/src/main/python/solrDataManager.py
@@ -47,7 +47,7 @@ verbose = False
 def parse_arguments():
   parser = optparse.OptionParser("usage: %prog [options]", version="Solr Data Manager {0}".format(VERSION))
 
-  parser.add_option("-m", "--mode", dest="mode", type="string", help="delete | save")
+  parser.add_option("-m", "--mode", dest="mode", type="string", help="archive | delete | save")
   parser.add_option("-s", "--solr-url", dest="solr_url", type="string", help="the url of the solr server including the port")
   parser.add_option("-c", "--collection", dest="collection", type="string", help="the name of the solr collection")
   parser.add_option("-f", "--filter-field", dest="filter_field", type="string", help="the name of the field to filter on")
@@ -98,14 +98,14 @@ def parse_arguments():
       parser.print_help()
       sys.exit()
   
-  mode_values = ["delete", "save"]
+  mode_values = ["archive", "delete", "save"]
   if options.mode not in mode_values:
     print "mode must be one of {0}".format(" | ".join(mode_values))
     parser.print_help()
     sys.exit()
 
   if options.mode == "delete":
-    for r in ["name", "compression", "hdfs_keytab", "hdfs_principal", "hdfs_user", "hdfs_path", "key_file_path", "bucket", "key_prefix", "local_path"]:
+    for r in ["name", "hdfs_keytab", "hdfs_principal", "hdfs_user", "hdfs_path", "key_file_path", "bucket", "key_prefix", "local_path"]:
       if options.__dict__[r] is not None:
         print "argument '{0}' may not be specified in delete mode".format(r)
         parser.print_help()
@@ -153,7 +153,7 @@ def parse_arguments():
     parser.print_help()
     sys.exit()
 
-  if options.mode == "save":
+  if options.mode in ["archive", "save"]:
     count = (1 if is_any_hdfs_property else 0) + (1 if is_any_s3_property else 0) + \
             (1 if options.__dict__["local_path"] is not None else 0)
     if count != 1:
@@ -171,7 +171,7 @@ def parse_arguments():
   print("  solr-url: " + options.solr_url)
   print("  collection: " + options.collection)
   print("  filter-field: " + options.filter_field)
-  if options.mode == "save":
+  if options.mode in ["archive", "save"]:
     print("  id-field: " + options.id_field)
   if options.__dict__["end"] is not None:
     print("  end: " + options.end)
@@ -182,14 +182,14 @@ def parse_arguments():
     print("  additional-filter: " + str(options.additional_filter))
   if options.__dict__["name"] is not None:
     print("  name: " + str(options.name))
-  if options.mode == "save":
+  if options.mode in ["archive", "save"]:
     print("  read-block-size: " + str(options.read_block_size))
     print("  write-block-size: " + str(options.write_block_size))
     print("  ignore-unfinished-uploading: " + str(options.ignore_unfinished_uploading))
   if (options.__dict__["solr_keytab"] is not None):
     print("  solr-keytab: " + options.solr_keytab)
     print("  solr-principal: " + options.solr_principal)
-  if options.mode == "save":
+  if options.mode in ["archive", "save"]:
     print("  output: " + ("json" if options.json_file else "line-delimited-json"))
     print("  compression: " + options.compression)
   if (options.__dict__["hdfs_keytab"] is not None):
@@ -251,7 +251,7 @@ def delete(solr_url, collection, filter_field, end, solr_keytab, solr_principal)
   
   query_solr(solr_kinit_command, delete_command, "{0} {1}".format(curl_prefix, delete_command), "Deleting")
 
-def save(solr_url, collection, filter_field, id_field, range_end, read_block_size, write_block_size,
+def save(mode, solr_url, collection, filter_field, id_field, range_end, read_block_size, write_block_size,
          ignore_unfinished_uploading, additional_filter, name, solr_keytab, solr_principal, json_file,
          compression, hdfs_keytab, hdfs_principal, hdfs_user, hdfs_path, key_file_path, bucket, key_prefix, local_path):
   solr_kinit_command = None
@@ -269,9 +269,11 @@ def save(solr_url, collection, filter_field, id_field, range_end, read_block_siz
     ensure_hdfs_path(hdfs_kinit_command, hdfs_user, hdfs_path)
 
   working_dir = get_working_dir(solr_url, collection)
-  handle_unfinished_uploading(solr_kinit_command, hdfs_kinit_command, curl_prefix, working_dir, ignore_unfinished_uploading)
-  save_data(solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, id_field, range_end,
-            read_block_size, write_block_size, working_dir, additional_filter, name, json_file, compression,
+  if mode == "archive":
+    handle_unfinished_uploading(solr_kinit_command, hdfs_kinit_command, curl_prefix, working_dir, ignore_unfinished_uploading)
+  
+  save_data(mode, solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, id_field,
+            range_end, read_block_size, write_block_size, working_dir, additional_filter, name, json_file, compression,
             hdfs_user, hdfs_path, key_file_path, bucket, key_prefix, local_path)
 
 def ensure_hdfs_path(hdfs_kinit_command, hdfs_user, hdfs_path):
@@ -341,7 +343,7 @@ def handle_unfinished_uploading(solr_kinit_command, hdfs_kinit_command, curl_pre
     
     os.remove(command_json_path)
 
-def save_data(solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, id_field,
+def save_data(mode, solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, id_field,
               range_end, read_block_size, write_block_size, working_dir, additional_filter, name, json_file,
               compression, hdfs_user, hdfs_path, key_file_path, bucket, key_prefix, local_path):
   logger.info("Starting to save data")
@@ -370,9 +372,9 @@ def save_data(solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, col
     prev_lot_end_id = results[3]
     
     if records > 0:
-      upload_block(solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, id_field,
-                   working_dir, tmp_file_path, name, prev_lot_end_value, prev_lot_end_id, hdfs_user, hdfs_path,
-                   key_file_path, bucket, key_prefix, local_path, compression)
+      upload_block(mode, solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field,
+                   id_field, working_dir, tmp_file_path, name, prev_lot_end_value, prev_lot_end_id, hdfs_user,
+                   hdfs_path, key_file_path, bucket, key_prefix, local_path, compression)
       total_records += records
       logger.info("A total of %d records are saved", total_records)
 
@@ -441,8 +443,8 @@ def finish_file(tmp_file, json_file):
   if json_file:
     tmp_file.write("\n}")
 
-def upload_block(solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field, id_field,
-                 working_dir, tmp_file_path, name, prev_lot_end_value, prev_lot_end_id, hdfs_user, hdfs_path,
+def upload_block(mode, solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url, collection, filter_field,
+                 id_field, working_dir, tmp_file_path, name, prev_lot_end_value, prev_lot_end_id, hdfs_user, hdfs_path,
                  key_file_path, bucket, key_prefix, local_path, compression):
   if name:
     file_name = "{0}_-_{1}_-_{2}_-_{3}".format(collection, name, prev_lot_end_value, prev_lot_end_id).replace(':', '_')
@@ -451,9 +453,9 @@ def upload_block(solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url,
   
   upload_file_path = compress_file(working_dir, tmp_file_path, file_name, compression)
   
-  upload_command = create_command_file(True, working_dir, upload_file_path, solr_url, collection, filter_field, id_field,
-                                       prev_lot_end_value, prev_lot_end_id, hdfs_user, hdfs_path, key_file_path, bucket,
-                                       key_prefix, local_path)
+  upload_command = create_command_file(mode, True, working_dir, upload_file_path, solr_url, collection, filter_field,
+                                       id_field, prev_lot_end_value, prev_lot_end_id, hdfs_user, hdfs_path,
+                                       key_file_path, bucket, key_prefix, local_path)
   if hdfs_user:
     upload_file_hdfs(hdfs_kinit_command, upload_command, upload_file_path, hdfs_path, hdfs_user)
   elif key_file_path:
@@ -464,11 +466,12 @@ def upload_block(solr_kinit_command, hdfs_kinit_command, curl_prefix, solr_url,
     logger.warn("Unknown upload destination")
     sys.exit()
   
-  delete_command = create_command_file(False, working_dir, upload_file_path, solr_url, collection, filter_field, id_field,
-                                       prev_lot_end_value, prev_lot_end_id, None, None, None, None, None, None)
-  delete_data(solr_kinit_command, curl_prefix, delete_command, collection, filter_field, id_field, prev_lot_end_value, prev_lot_end_id)
-  
-  os.remove("{0}/command.json".format(working_dir))
+  delete_command = create_command_file(mode, False, working_dir, upload_file_path, solr_url, collection, filter_field,
+                                       id_field, prev_lot_end_value, prev_lot_end_id, None, None, None, None, None, None)
+  if mode == "archive":
+    delete_data(solr_kinit_command, curl_prefix, delete_command, collection, filter_field, id_field, prev_lot_end_value,
+                prev_lot_end_id)
+    os.remove("{0}/command.json".format(working_dir))
 
 def compress_file(working_dir, tmp_file_path, file_name, compression):
   data_file_name = "{0}.json".format(file_name)
@@ -511,8 +514,9 @@ def compress_file(working_dir, tmp_file_path, file_name, compression):
   
   return upload_file_path
 
-def create_command_file(upload, working_dir, upload_file_path, solr_url, collection, filter_field, id_field, prev_lot_end_value,
-                        prev_lot_end_id, hdfs_user, hdfs_path, key_file_path, bucket, key_prefix, local_path):
+def create_command_file(mode, upload, working_dir, upload_file_path, solr_url, collection, filter_field, id_field,
+                        prev_lot_end_value, prev_lot_end_id, hdfs_user, hdfs_path, key_file_path, bucket, key_prefix,
+                        local_path):
   commands = {}
   
   if upload:
@@ -551,6 +555,9 @@ def create_command_file(upload, working_dir, upload_file_path, solr_url, collect
     else:
       logger.warn("Unknown upload destination")
       sys.exit()
+    
+    if mode == "save":
+      return upload_command
 
   
   delete_prev = "{0}:[*+TO+\"{1}\"]".format(filter_field, prev_lot_end_value)
@@ -558,6 +565,9 @@ def create_command_file(upload, working_dir, upload_file_path, solr_url, collect
   delete_query = quote("{0}+OR+{1}".format(delete_prev, delete_last), safe="/+\"*")
   delete_command = "{0}/{1}/update?stream.body=<delete><query>{2}</query></delete>&commit=true&wt=json" \
     .format(solr_url, collection, delete_query)
+  if mode == "save":
+    return delete_command
+  
   delete_command_data = {}
   delete_command_data["command"] = delete_command
   delete_command_data["collection"] = collection
@@ -710,12 +720,12 @@ if __name__ == '__main__':
     
     if options.mode == "delete":
       delete(options.solr_url, options.collection, options.filter_field, end, options.solr_keytab, options.solr_principal)
-    elif options.mode == "save":
-      save(options.solr_url, options.collection, options.filter_field, options.id_field, end, options.read_block_size,
-           options.write_block_size, options.ignore_unfinished_uploading, options.additional_filter, options.name,
-           options.solr_keytab, options.solr_principal, options.json_file, options.compression,
-           options.hdfs_keytab, options.hdfs_principal, options.hdfs_user, options.hdfs_path, options.key_file_path,
-           options.bucket, options.key_prefix, options.local_path)
+    elif options.mode in ["archive", "save"]:
+      save(options.mode, options.solr_url, options.collection, options.filter_field, options.id_field, end,
+           options.read_block_size, options.write_block_size, options.ignore_unfinished_uploading,
+           options.additional_filter, options.name, options.solr_keytab, options.solr_principal, options.json_file,
+           options.compression, options.hdfs_keytab, options.hdfs_principal, options.hdfs_user, options.hdfs_path,
+           options.key_file_path, options.bucket, options.key_prefix, options.local_path)
     else:
       logger.warn("Unknown mode: %s", options.mode)
     

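In short, the patch threads a new mode flag through save(), save_data(), upload_block() and create_command_file(): "archive" keeps the original behavior (upload each block, then delete the exported documents from Solr and clear the command.json resume marker), while "save" uploads only and returns the command early, before any delete runs. A minimal Python sketch of the resulting control flow, with stub helpers standing in for the script's real upload and delete functions:

  import os

  def upload(path):
      print("uploading", path)             # stub: HDFS/S3/local upload

  def delete_from_solr(path):
      print("deleting docs behind", path)  # stub: Solr delete query

  def process_block(mode, working_dir, block_file):
      upload(block_file)                   # both modes upload the block
      if mode == "archive":
          # only "archive" removes the exported docs and resume marker
          delete_from_solr(block_file)
          marker = os.path.join(working_dir, "command.json")
          if os.path.exists(marker):
              os.remove(marker)

The same split explains the other guard in the diff: handle_unfinished_uploading() is only invoked for "archive", since resuming an interrupted run matters only when deletes are involved.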

[27/50] [abbrv] ambari git commit: AMBARI-22072. Allow for backward compatibility in how identity references are specified (rlevas)

Posted by rl...@apache.org.
AMBARI-22072. Allow for backward compatibility in how identity references are specified (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7950e3c1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7950e3c1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7950e3c1

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 7950e3c1166d51501c3baf7b0cd1d363f2426703
Parents: f744a36
Author: Robert Levas <rl...@hortonworks.com>
Authored: Thu Sep 28 16:36:43 2017 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Thu Sep 28 16:36:43 2017 -0400

----------------------------------------------------------------------
 .../AbstractKerberosDescriptorContainer.java    | 26 ++++++++-
 .../state/kerberos/KerberosDescriptor.java      |  3 +-
 .../state/kerberos/KerberosDescriptorTest.java  | 56 ++++++++++++++++++++
 3 files changed, 83 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/7950e3c1/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
index 4255dd1..9ddb941 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/AbstractKerberosDescriptorContainer.java
@@ -855,7 +855,31 @@ public abstract class AbstractKerberosDescriptorContainer extends AbstractKerber
     if (identity != null) {
       KerberosIdentityDescriptor referencedIdentity;
       try {
-        referencedIdentity = getReferencedIdentityDescriptor(identity.getReference());
+        if (identity.getReference() != null) {
+          referencedIdentity = getReferencedIdentityDescriptor(identity.getReference());
+        } else {
+          // For backwards compatibility, see if the identity's name indicates a reference...
+          referencedIdentity = getReferencedIdentityDescriptor(identity.getName());
+
+          if(referencedIdentity != null) {
+            // Calculate the path to this identity descriptor for logging purposes.
+            // Examples:
+            //   /
+            //   /SERVICE
+            //   /SERVICE/COMPONENT
+            StringBuilder path = new StringBuilder();
+            AbstractKerberosDescriptor parent = identity.getParent();
+            while(parent != null && (parent.getName() != null)) {
+              path.insert(0, parent.getName());
+              path.insert(0, '/');
+              parent = parent.getParent();
+            }
+
+            // Log this since it is deprecated...
+            LOG.warn("Referenced identities should be declared using the identity's \"reference\" attribute, not the identity's \"name\" attribute." +
+                " This is a deprecated feature. Problems may occur in the future unless this is corrected: {}:{}", path, identity.getName());
+          }
+        }
       } catch (AmbariException e) {
         throw new AmbariException(String.format("Invalid Kerberos identity reference: %s", identity.getReference()), e);
       }

http://git-wip-us.apache.org/repos/asf/ambari/blob/7950e3c1/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
index 0c7a9a9..b2b9a60 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/kerberos/KerberosDescriptor.java
@@ -456,7 +456,8 @@ public class KerberosDescriptor extends AbstractKerberosDescriptorContainer {
 
   private static void collectFromIdentities(String service, String component, Collection<KerberosIdentityDescriptor> identities, Map<String, String> result) {
     for (KerberosIdentityDescriptor each : identities) {
-      if (each.getPrincipalDescriptor() != null && !each.getReferencedServiceName().isPresent()) {
+      if (each.getPrincipalDescriptor() != null && !each.getReferencedServiceName().isPresent() &&
+          !each.getName().startsWith("/")) {
         String path = StringUtils.isBlank(component)
             ? String.format("%s/%s", service, each.getName())
             : String.format("%s/%s/%s", service, component, each.getName());

http://git-wip-us.apache.org/repos/asf/ambari/blob/7950e3c1/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
index d0d57d5..d6bef02 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/state/kerberos/KerberosDescriptorTest.java
@@ -59,6 +59,41 @@ public class KerberosDescriptorTest {
           "    ]" +
           "}";
 
+  private static final String JSON_VALUE_IDENTITY_REFERENCES =
+      "{" +
+          "  \"identities\": [" +
+          "    {" +
+          "      \"keytab\": {" +
+          "        \"file\": \"${keytab_dir}/spnego.service.keytab\"" +
+          "      }," +
+          "      \"name\": \"spnego\"," +
+          "      \"principal\": {" +
+          "        \"type\": \"service\"," +
+          "        \"value\": \"HTTP/_HOST@${realm}\"" +
+          "      }" +
+          "    }" +
+          "  ]," +
+          "  \"services\": [" +
+          "    {" +
+          "      \"identities\": [" +
+          "        {" +
+          "          \"name\": \"service1_spnego\"," +
+          "          \"reference\": \"/spnego\"" +
+          "        }" +
+          "      ]," +
+          "      \"name\": \"SERVICE1\"" +
+          "    }," +
+          "    {" +
+          "      \"identities\": [" +
+          "        {" +
+          "          \"name\": \"/spnego\"" +
+          "        }" +
+          "      ]," +
+          "      \"name\": \"SERVICE2\"" +
+          "    }" +
+          "  ]" +
+          "}";
+
   private static final Map<String, Object> MAP_VALUE;
 
   static {
@@ -502,4 +537,25 @@ public class KerberosDescriptorTest {
     Assert.assertEquals("service2_component1@${realm}", principalsPerComponent.get("SERVICE2/SERVICE2_COMPONENT1/service2_component1_identity"));
     Assert.assertEquals("service1@${realm}", principalsPerComponent.get("SERVICE1/service1_identity"));
   }
+
+  @Test
+  public void testIdentityReferences() throws Exception {
+    KerberosDescriptor kerberosDescriptor = KERBEROS_DESCRIPTOR_FACTORY.createInstance(JSON_VALUE_IDENTITY_REFERENCES);
+    KerberosServiceDescriptor serviceDescriptor;
+    List<KerberosIdentityDescriptor> identities;
+
+    // Reference is determined using the "reference" attribute
+    serviceDescriptor = kerberosDescriptor.getService("SERVICE1");
+    identities = serviceDescriptor.getIdentities(true, null);
+    Assert.assertEquals(1, identities.size());
+    Assert.assertEquals("service1_spnego", identities.get(0).getName());
+    Assert.assertEquals("/spnego", identities.get(0).getReference());
+
+    // Reference is determined using the "name" attribute
+    serviceDescriptor = kerberosDescriptor.getService("SERVICE2");
+    identities = serviceDescriptor.getIdentities(true, null);
+    Assert.assertEquals(1, identities.size());
+    Assert.assertEquals("/spnego", identities.get(0).getName());
+    Assert.assertNull(identities.get(0).getReference());
+  }
 }
\ No newline at end of file
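
The change gives legacy descriptors a second resolution path: an identity may point at a shared identity either through the explicit "reference" attribute or, for backward compatibility, through a name that begins with "/" (now deprecated and logged with a warning). A small Python sketch of that lookup order, using plain dicts in place of the descriptor classes:

  def resolve_reference(identity):
      """Prefer the explicit attribute; fall back to a "/"-prefixed name."""
      ref = identity.get("reference")
      if ref is not None:
          return ref
      name = identity.get("name", "")
      # deprecated legacy form, as exercised by SERVICE2 in the test above
      return name if name.startswith("/") else None

  assert resolve_reference({"name": "service1_spnego",
                            "reference": "/spnego"}) == "/spnego"
  assert resolve_reference({"name": "/spnego"}) == "/spnego"   # legacy
  assert resolve_reference({"name": "service1_identity"}) is None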


[46/50] [abbrv] ambari git commit: AMBARI-22068. Remove user to group hardcoded logic in hooks. (vbrodetskyi)

Posted by rl...@apache.org.
AMBARI-22068. Remove user to group hardcoded logic in hooks. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3889ddfc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3889ddfc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3889ddfc

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 3889ddfca272dc4380f60fb1c7f8915998dc7603
Parents: 29ea0ff
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Mon Oct 2 11:17:33 2017 +0300
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Mon Oct 2 11:17:33 2017 +0300

----------------------------------------------------------------------
 ambari-server/src/main/resources/configuration-schema.xsd | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3889ddfc/ambari-server/src/main/resources/configuration-schema.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/configuration-schema.xsd b/ambari-server/src/main/resources/configuration-schema.xsd
index a67de9a..9350984 100644
--- a/ambari-server/src/main/resources/configuration-schema.xsd
+++ b/ambari-server/src/main/resources/configuration-schema.xsd
@@ -115,7 +115,7 @@
     <xs:element name="user-groups" minOccurs="0">
       <xs:complexType>
         <xs:sequence>
-          <xs:element name="property" type="userGroupInfo" minOccurs="1"/>
+          <xs:element name="property" type="userGroupInfo" minOccurs="1" maxOccurs="unbounded"/>
         </xs:sequence>
       </xs:complexType>
     </xs:element>
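
Before the fix the schema accepted exactly one <property> element under <user-groups> (the XSD default for maxOccurs is 1), so configurations declaring several user-to-group mappings failed validation; maxOccurs="unbounded" lifts that cap. A quick way to confirm the behavior against a simplified schema fragment (Python with lxml; userGroupInfo reduced to a plain string type purely for illustration):

  from lxml import etree

  schema = etree.XMLSchema(etree.XML(b"""
  <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
    <xs:element name="user-groups">
      <xs:complexType>
        <xs:sequence>
          <xs:element name="property" type="xs:string"
                      minOccurs="1" maxOccurs="unbounded"/>
        </xs:sequence>
      </xs:complexType>
    </xs:element>
  </xs:schema>"""))

  two_mappings = etree.XML(b"<user-groups><property>a</property>"
                           b"<property>b</property></user-groups>")
  assert schema.validate(two_mappings)  # rejected before the fix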


[20/50] [abbrv] ambari git commit: AMBARI-21799: Hive query failing randomly with NullPointerException status 500 (nitirajrathore)

Posted by rl...@apache.org.
AMBARI-21799: Hive query failing randomly with NullPointerException status 500 (nitirajrathore)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d198dd3c
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d198dd3c
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d198dd3c

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: d198dd3c43d491b8fb0689546522351bb9b303de
Parents: add33d4
Author: Nitiraj Singh Rathore <ni...@gmail.com>
Authored: Thu Sep 28 17:12:40 2017 +0530
Committer: Nitiraj Singh Rathore <ni...@gmail.com>
Committed: Thu Sep 28 17:14:27 2017 +0530

----------------------------------------------------------------------
 .../src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d198dd3c/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
index 8b987be..3db2081 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -500,7 +500,7 @@ public class HdfsApi {
         result = ugi.doAs(action);
         succeeded = true;
       } catch (IOException ex) {
-        if (!ex.getMessage().contains("Cannot obtain block length for")) {
+        if (!Strings.isNullOrEmpty(ex.getMessage()) && !ex.getMessage().contains("Cannot obtain block length for")) {
           throw ex;
         }
         if (tryNumber >= 3) {
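
The root cause: IOException.getMessage() can legitimately return null, so the old getMessage().contains(...) call was itself the null pointer exception being reported. The guard now rethrows immediately only when a non-empty message identifies an unrelated failure; otherwise the retry loop proceeds. The same logic in Python, with the possibly-missing message modeled via ex.args:

  def run_with_retry(action, max_tries=3):
      for attempt in range(1, max_tries + 1):
          try:
              return action()
          except IOError as ex:
              msg = ex.args[0] if ex.args else None  # may be absent
              if msg and "Cannot obtain block length for" not in msg:
                  raise        # unrelated failure: propagate at once
              if attempt >= max_tries:
                  raise        # retryable case, but retries exhausted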


[05/50] [abbrv] ambari git commit: AMBARI-21998. Ambari upgrade doesn't delete stale constraints (dlysnichenko)

Posted by rl...@apache.org.
AMBARI-21998. Ambari upgrade doesn't delete stale constraints (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/439a9ad0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/439a9ad0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/439a9ad0

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 439a9ad09d360d806befc386c5717ca576c3f221
Parents: a873626
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue Sep 26 18:58:22 2017 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Tue Sep 26 18:58:52 2017 +0300

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog260.java       | 25 ++++++++++++++++++++
 .../server/upgrade/UpgradeCatalog260Test.java   | 14 +++++++++++
 2 files changed, 39 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/439a9ad0/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
index 5003c13..c31469e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java
@@ -143,6 +143,12 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
   public static final String NOT_REQUIRED = "NOT_REQUIRED";
   public static final String CURRENT = "CURRENT";
   public static final String SELECTED = "1";
+  public static final String VIEWURL_TABLE = "viewurl";
+  public static final String PK_VIEWURL = "PK_viewurl";
+  public static final String URL_ID_COLUMN = "url_id";
+  public static final String STALE_POSTGRESS_VIEWURL_PKEY = "viewurl_pkey";
+  public static final String USERS_TABLE = "users";
+  public static final String STALE_POSTGRESS_USERS_LDAP_USER_KEY = "users_ldap_user_key";
 
 
   /**
@@ -189,6 +195,25 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog {
     updateRepositoryVersionTable();
     renameServiceDeletedColumn();
     expandUpgradeItemItemTextColumn();
+    addViewUrlPKConstraint();
+    removeStaleConstraints();
+  }
+
+
+  /**
+   * Updates {@value #VIEWURL_TABLE} table.
+   * Adds the {@value #PK_VIEWURL} constraint.
+   */
+  private void addViewUrlPKConstraint() throws SQLException {
+    dbAccessor.dropPKConstraint(VIEWURL_TABLE, STALE_POSTGRESS_VIEWURL_PKEY);
+    dbAccessor.addPKConstraint(VIEWURL_TABLE, PK_VIEWURL, URL_ID_COLUMN);
+  }
+
+  /**
+   * remove stale unnamed constraints
+   */
+  private void removeStaleConstraints() throws SQLException {
+    dbAccessor.dropUniqueConstraint(USERS_TABLE, STALE_POSTGRESS_USERS_LDAP_USER_KEY);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/ambari/blob/439a9ad0/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
index d0d5dac..cc58988 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java
@@ -201,6 +201,9 @@ public class UpgradeCatalog260Test {
     Capture<DBColumnInfo> unapped = newCapture();
     expectRenameServiceDeletedColumn(unapped);
 
+    expectAddViewUrlPKConstraint();
+    expectRemoveStaleConstraints();
+    
     replay(dbAccessor, configuration, connection, statement, resultSet);
 
     Injector injector = getInjector();
@@ -218,6 +221,17 @@ public class UpgradeCatalog260Test {
     verifyUpdateRepositoryVersionTableTable(repoVersionHiddenColumnCapture, repoVersionResolvedColumnCapture);
   }
 
+  private void expectRemoveStaleConstraints() throws SQLException {
+    dbAccessor.dropUniqueConstraint(eq(UpgradeCatalog260.USERS_TABLE), eq(UpgradeCatalog260.STALE_POSTGRESS_USERS_LDAP_USER_KEY));
+  }
+
+  private void expectAddViewUrlPKConstraint() throws SQLException {
+    dbAccessor.dropPKConstraint(eq(UpgradeCatalog260.VIEWURL_TABLE), eq(UpgradeCatalog260.STALE_POSTGRESS_VIEWURL_PKEY));
+    expectLastCall().once();
+    dbAccessor.addPKConstraint(eq(UpgradeCatalog260.VIEWURL_TABLE), eq(UpgradeCatalog260.PK_VIEWURL), eq(UpgradeCatalog260.URL_ID_COLUMN));
+    expectLastCall().once();
+  }
+
   public void expectDropStaleTables() throws SQLException {
     dbAccessor.dropTable(eq(UpgradeCatalog260.CLUSTER_CONFIG_MAPPING_TABLE));
     expectLastCall().once();
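
On upgraded Postgres databases the primary key on viewurl and the unique key on users carried auto-generated names (viewurl_pkey, users_ldap_user_key) that later schema operations could not address, so the catalog drops them and re-creates the primary key under its canonical name. Expressed as roughly equivalent SQL (illustrative only; Ambari issues these through its DBAccessor abstraction, not raw statements):

  STATEMENTS = [
      'ALTER TABLE viewurl DROP CONSTRAINT IF EXISTS viewurl_pkey',
      'ALTER TABLE viewurl ADD CONSTRAINT "PK_viewurl" PRIMARY KEY (url_id)',
      'ALTER TABLE users DROP CONSTRAINT IF EXISTS users_ldap_user_key',
  ]
  for sql in STATEMENTS:
      print(sql)  # or cursor.execute(sql) against the Ambari database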


[06/50] [abbrv] ambari git commit: AMBARI-22060. Failure to restart Ranger Admin during HDP downgrade. (dlysnichenko)

Posted by rl...@apache.org.
AMBARI-22060. Failure to restart Ranger Admin during HDP downgrade. (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1a90a522
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1a90a522
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1a90a522

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 1a90a5221f0af01160d1d3fe5986d4e76977fb7d
Parents: 439a9ad
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Tue Sep 26 18:59:50 2017 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Tue Sep 26 19:00:33 2017 +0300

----------------------------------------------------------------------
 .../server/serveraction/upgrades/FinalizeUpgradeAction.java     | 5 +++++
 .../org/apache/ambari/server/state/cluster/ClusterImpl.java     | 2 +-
 2 files changed, 6 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1a90a522/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
index 5ec0692..451f802 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FinalizeUpgradeAction.java
@@ -202,6 +202,11 @@ public class FinalizeUpgradeAction extends AbstractUpgradeServerAction {
       // longer used
       finalizeHostRepositoryVersions(cluster);
 
+      if (upgradeContext.getOrchestrationType() == RepositoryType.STANDARD) {
+        outSB.append(String.format("Finalizing the version for cluster %s.\n", cluster.getClusterName()));
+        cluster.setCurrentStackVersion(cluster.getDesiredStackVersion());
+      }
+
       // mark revertable
       if (repositoryType.isRevertable() && direction == Direction.UPGRADE) {
         UpgradeEntity upgrade = cluster.getUpgradeInProgress();

http://git-wip-us.apache.org/repos/asf/ambari/blob/1a90a522/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
index 8f1a882..9c0b0ca 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java
@@ -2415,7 +2415,7 @@ public class ClusterImpl implements Cluster {
       // since the entities which were modified came from the cluster entity's
       // list to begin with, we can just save them right back - no need for a
       // new collection since the entity instances were modified directly
-      clusterEntity = clusterDAO.merge(clusterEntity);
+      clusterEntity = clusterDAO.merge(clusterEntity, true);
 
       cacheConfigurations();
 

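The fix suggests the restart failed because the cluster-level stack version was never moved when the downgrade was finalized. For STANDARD (full-stack) orchestrations, finalization now also advances the cluster's current stack to the desired one; patch and maintenance upgrades leave it untouched. Reduced to a Python sketch with a toy Cluster object (not the Ambari classes):

  from dataclasses import dataclass

  @dataclass
  class Cluster:
      current_stack_version: str
      desired_stack_version: str

  def finalize(cluster, orchestration_type):
      # only full-stack upgrades move the cluster-level stack pointer
      if orchestration_type == "STANDARD":
          cluster.current_stack_version = cluster.desired_stack_version

  c = Cluster("HDP-2.5", "HDP-2.6")
  finalize(c, "STANDARD")
  assert c.current_stack_version == "HDP-2.6"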

[47/50] [abbrv] ambari git commit: AMBARI-22106 Log Search UI: incorrect link to third-party scripts bundle. (ababiichuk)

Posted by rl...@apache.org.
AMBARI-22106 Log Search UI: incorrect link to third-party scripts bundle. (ababiichuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f92da65f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f92da65f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f92da65f

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: f92da65f7d643d40e7cde0113b1f34c608fb7827
Parents: 3889ddf
Author: ababiichuk <ab...@hortonworks.com>
Authored: Mon Oct 2 11:02:18 2017 +0300
Committer: ababiichuk <ab...@hortonworks.com>
Committed: Mon Oct 2 11:48:37 2017 +0300

----------------------------------------------------------------------
 .../ambari-logsearch-web/webpack.config.js      | 21 ++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f92da65f/ambari-logsearch/ambari-logsearch-web/webpack.config.js
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/webpack.config.js b/ambari-logsearch/ambari-logsearch-web/webpack.config.js
index c608d10..7a60df2 100644
--- a/ambari-logsearch/ambari-logsearch-web/webpack.config.js
+++ b/ambari-logsearch/ambari-logsearch-web/webpack.config.js
@@ -75,6 +75,8 @@ const postcssPlugins = function () {
         ].concat(minimizeCss ? [cssnano(minimizeOptions)] : []);
     };
 
+const resourcesDirName = 'resources';
+
 module.exports = {
   "resolve": {
     "extensions": [
@@ -109,9 +111,9 @@ module.exports = {
     ]
   },
   "output": {
-    "path": path.join(process.cwd(), "dist/resources"),
-    "publicPath": "resources/",
-    "filename": "[name].bundle.js",
+    "path": path.join(process.cwd(), "dist"),
+    "publicPath": "",
+    "filename": `${resourcesDirName}/[name].bundle.js`,
     "chunkFilename": "[id].chunk.js"
   },
   "module": {
@@ -130,11 +132,11 @@ module.exports = {
       },
       {
         "test": /\.(eot|svg|cur)$/,
-        "loader": "file-loader?name=[name].[ext]"
+        "loader": `file-loader?name=${resourcesDirName}/[name].[ext]`
       },
       {
         "test": /\.(jpg|png|webp|gif|otf|ttf|woff|woff2|ani)$/,
-        "loader": "url-loader?name=[name].[ext]&limit=10000"
+        "loader": `url-loader?name=${resourcesDirName}/[name].[ext]&limit=10000`
       },
       {
         "exclude": [
@@ -408,7 +410,7 @@ module.exports = {
       "uglify": false,
       "sourceMap": true,
       "name": "scripts",
-      "fileName": "../resources/[name].bundle.js",
+      "fileName": `${resourcesDirName}/[name].bundle.js`,
       "filesToConcat": [
         "node_modules/jquery/dist/jquery.min.js",
         "node_modules/bootstrap/dist/js/bootstrap.min.js",
@@ -421,7 +423,7 @@ module.exports = {
     new CopyWebpackPlugin([
       {
         "context": "src/",
-        "to": "",
+        "to": resourcesDirName,
         "from": {
           "glob": "assets/**/*",
           "dot": true
@@ -429,7 +431,7 @@ module.exports = {
       },
       {
         "context": "src/",
-        "to": "../favicon.ico",
+        "to": "favicon.ico",
         "from": {
           "glob": "favicon.ico",
           "dot": true
@@ -449,7 +451,7 @@ module.exports = {
     new NamedLazyChunksWebpackPlugin(),
     new HtmlWebpackPlugin({
       "template": "./src/index.html",
-      "filename": "../index.html",
+      "filename": "index.html",
       "hash": false,
       "inject": true,
       "compile": true,
@@ -533,7 +535,6 @@ module.exports = {
     "setImmediate": false
   },
   "devServer": {
-    "contentBase": path.join(process.cwd(), "dist"),
     "historyApiFallback": true
   }
 };


[22/50] [abbrv] ambari git commit: AMBARI-22074 Confusing AMS collector heap size validation loop (dsen)

Posted by rl...@apache.org.
AMBARI-22074 Confusing AMS collector heap size validation loop (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b660bf1a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b660bf1a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b660bf1a

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: b660bf1a30fd324d099f0cb3af1e525b4364a2d7
Parents: c7c62c0
Author: Dmytro Sen <ds...@apache.org>
Authored: Thu Sep 28 17:17:32 2017 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Thu Sep 28 17:17:32 2017 +0300

----------------------------------------------------------------------
 .../stacks/HDP/2.0.6/services/stack_advisor.py  | 36 +++++---------------
 1 file changed, 8 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b660bf1a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
index 0249ea2..5307176 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/stack_advisor.py
@@ -1105,36 +1105,16 @@ class HDP206StackAdvisor(DefaultStackAdvisor):
 
   def validateAmsEnvConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
 
-    ams_env = getSiteProperties(configurations, "ams-env")
-    mb = 1024 * 1024
-    gb = 1024 * mb
     validationItems = []
+    collectorHeapsizeDefaultItem = self.validatorLessThenDefaultValue(properties, recommendedDefaults, "metrics_collector_heapsize")
+    validationItems.extend([{"config-name": "metrics_collector_heapsize", "item": collectorHeapsizeDefaultItem}])
+
+    ams_env = getSiteProperties(configurations, "ams-env")
     collector_heapsize = self.to_number(ams_env.get("metrics_collector_heapsize"))
-    amsCollectorHosts = self.getComponentHostNames(services, "AMBARI_METRICS", "METRICS_COLLECTOR")
-    for collectorHostName in amsCollectorHosts:
-      for host in hosts["items"]:
-        if host["Hosts"]["host_name"] == collectorHostName:
-          hostComponents = []
-          for service in services["services"]:
-            for component in service["components"]:
-              if component["StackServiceComponents"]["hostnames"] is not None:
-                if collectorHostName in component["StackServiceComponents"]["hostnames"]:
-                  hostComponents.append(component["StackServiceComponents"]["component_name"])
-
-          requiredMemory = self.getMemorySizeRequired(services, hostComponents, configurations)
-          unusedMemory = host["Hosts"]["total_mem"] * 1024 - requiredMemory # in bytes
-          collector_needs_increase = collector_heapsize * mb < 16 * gb
-
-          if unusedMemory > 4*gb and collector_needs_increase:  # warn user, if more than 4GB RAM is unused
-            recommended_collector_heapsize = int((unusedMemory - 4*gb)/5) + collector_heapsize * mb
-            recommended_collector_heapsize = min(16*gb, recommended_collector_heapsize) #Make sure heapsize <= 16GB
-            recommended_collector_heapsize = round_to_n(recommended_collector_heapsize/mb,128) # Round to 128m multiple
-            if collector_heapsize < recommended_collector_heapsize:
-              validation_msg = "Consider allocating {0} MB to metrics_collector_heapsize in ams-env to use up some " \
-                               "unused memory on host"
-              collectorHeapsizeItem = self.getWarnItem(validation_msg.format(recommended_collector_heapsize))
-              validationItems.extend([{"config-name": "metrics_collector_heapsize", "item": collectorHeapsizeItem}])
-    pass
+    if collector_heapsize > 32768:
+      collectorHeapsizeMaxItem = self.getWarnItem("Value is more than the recommended maximum heap size of 32G.")
+      validationItems.extend([{"config-name": "metrics_collector_heapsize", "item": collectorHeapsizeMaxItem}])
+
     return self.toConfigurationValidationProblems(validationItems, "ams-env")
 
   def getMemorySizeRequired(self, services, components, configurations):
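
The rewritten validator drops the per-host free-memory heuristic (which looped over every collector host and its components) in favor of two cheap checks: flag a value below the recommended default via the stock validatorLessThenDefaultValue helper, and warn when the heap exceeds 32 GB. Roughly, as a standalone sketch (the first warning's wording is approximate; the real text comes from the helper):

  def validate_ams_collector_heapsize(value_mb, recommended_mb):
      items = []
      if value_mb < recommended_mb:
          items.append(("metrics_collector_heapsize",
                        "Value is less than the recommended default"))
      if value_mb > 32768:  # 32 GB, expressed in MB
          items.append(("metrics_collector_heapsize",
                        "Value is more than the recommended maximum "
                        "heap size of 32G."))
      return items

  assert validate_ams_collector_heapsize(40960, 1024)  # warns: > 32 GB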


[10/50] [abbrv] ambari git commit: AMBARI-22064. Remove Database auto configuration from Infra Manager (oleewere)

Posted by rl...@apache.org.
AMBARI-22064. Remove Database auto configuration from Infra Manager (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3e1a5cb7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3e1a5cb7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3e1a5cb7

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 3e1a5cb7c1e7034d82aa8acfe3b1fd33e11cdb04
Parents: 6fb05cb
Author: Oliver Szabo <ol...@gmail.com>
Authored: Tue Sep 26 20:22:54 2017 +0200
Committer: Oliver Szabo <ol...@gmail.com>
Committed: Wed Sep 27 11:26:23 2017 +0200

----------------------------------------------------------------------
 .../src/main/java/org/apache/ambari/infra/InfraManager.java      | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3e1a5cb7/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
index 185e344..179b2d1 100644
--- a/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
+++ b/ambari-infra/ambari-infra-manager/src/main/java/org/apache/ambari/infra/InfraManager.java
@@ -22,6 +22,7 @@ import org.springframework.boot.Banner;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.boot.autoconfigure.batch.BatchAutoConfiguration;
 import org.springframework.boot.autoconfigure.data.rest.RepositoryRestMvcAutoConfiguration;
+import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
 import org.springframework.boot.autoconfigure.security.SecurityAutoConfiguration;
 import org.springframework.boot.autoconfigure.web.WebMvcAutoConfiguration;
 import org.springframework.boot.builder.SpringApplicationBuilder;
@@ -33,7 +34,8 @@ import org.springframework.boot.system.ApplicationPidFileWriter;
     RepositoryRestMvcAutoConfiguration.class,
     WebMvcAutoConfiguration.class,
     BatchAutoConfiguration.class,
-    SecurityAutoConfiguration.class
+    SecurityAutoConfiguration.class,
+    DataSourceAutoConfiguration.class
   }
 )
 public class InfraManager {


[31/50] [abbrv] ambari git commit: AMBARI-22094 Install Wizard: refactor stack versions step. (atkach)

Posted by rl...@apache.org.
AMBARI-22094 Install Wizard: refactor stack versions step. (atkach)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f5f888cd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f5f888cd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f5f888cd

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: f5f888cd7e7297e514537ce4754397590a0dc62f
Parents: 504094e
Author: Andrii Tkach <at...@apache.org>
Authored: Fri Sep 29 17:44:37 2017 +0300
Committer: Andrii Tkach <at...@apache.org>
Committed: Fri Sep 29 17:44:37 2017 +0300

----------------------------------------------------------------------
 ambari-web/app/controllers/installer.js       |  8 ++---
 ambari-web/app/styles/stack_versions.less     |  2 --
 ambari-web/test/controllers/installer_test.js | 37 ++++++++++++++++------
 3 files changed, 30 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f5f888cd/ambari-web/app/controllers/installer.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/controllers/installer.js b/ambari-web/app/controllers/installer.js
index 4c12a5b..3c50b26 100644
--- a/ambari-web/app/controllers/installer.js
+++ b/ambari-web/app/controllers/installer.js
@@ -930,9 +930,9 @@ App.InstallerController = App.WizardController.extend(App.Persist, {
         callback: function () {
           var dfd = $.Deferred();
 
-          this.loadStacks().always(function() {
+          this.loadStacks().done(function(stacksLoaded) {
             App.router.get('clusterController').loadAmbariProperties().always(function() {
-              dfd.resolve();
+              dfd.resolve(stacksLoaded);
             });
           });
 
@@ -946,9 +946,7 @@ App.InstallerController = App.WizardController.extend(App.Persist, {
 
           if (!stacksLoaded) {
             $.when.apply(this, this.loadStacksVersions()).done(function () {
-              Em.run.later('sync', function() {
-                dfd.resolve(stacksLoaded);
-              }, 1000);
+              dfd.resolve(true);
             });
           } else {
             dfd.resolve(stacksLoaded);

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5f888cd/ambari-web/app/styles/stack_versions.less
----------------------------------------------------------------------
diff --git a/ambari-web/app/styles/stack_versions.less b/ambari-web/app/styles/stack_versions.less
index 0221311..28a3239 100644
--- a/ambari-web/app/styles/stack_versions.less
+++ b/ambari-web/app/styles/stack_versions.less
@@ -468,8 +468,6 @@
   }
   .task-list-main-wrap i {
     font-size: 16px;
-    color: #0088cc;
-    margin-right: 3px;
   }
   ul.failed-info-list {
     max-height: 500px;

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5f888cd/ambari-web/test/controllers/installer_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/controllers/installer_test.js b/ambari-web/test/controllers/installer_test.js
index bc91a8e..65a1e5c 100644
--- a/ambari-web/test/controllers/installer_test.js
+++ b/ambari-web/test/controllers/installer_test.js
@@ -460,35 +460,52 @@ describe('App.InstallerController', function () {
       var checker = {
         loadStacks: function() {
           return {
-            always: function() {
-              loadStacks = true;
+            done: function(callback) {
+              callback(true);
             }
           };
         }
       };
 
       beforeEach(function () {
+        sinon.spy(checker, 'loadStacks');
         installerController.loadMap['1'][0].callback.call(checker);
       });
 
-      it('stack info is loaded', function () {
-        expect(loadStacks).to.be.true;
+      afterEach(function() {
+        checker.loadStacks.restore();
+      });
+
+      it('should call loadStacks, stack info not loaded', function () {
+        expect(checker.loadStacks.calledOnce).to.be.true;
       });
     });
 
-    describe ('Should load stacks async', function() {
-      var loadStacksVersions = false;
+    describe('Should load stacks async', function() {
       var checker = {
-        loadStacksVersions: function() {
-          loadStacksVersions = true;
-        }
+        loadStacksVersions: Em.K
       };
 
+      beforeEach(function () {
+        sinon.spy(checker, 'loadStacksVersions');
+      });
+
+      afterEach(function() {
+        checker.loadStacksVersions.restore();
+      });
+
       it('stack versions are loaded', function () {
         installerController.loadMap['1'][1].callback.call(checker, true).then(function(data){
           expect(data).to.be.true;
         });
-        expect(loadStacksVersions).to.be.false;
+        expect(checker.loadStacksVersions.called).to.be.false;
+      });
+
+      it('should call loadStacksVersions, stack versions not loaded', function () {
+        installerController.loadMap['1'][1].callback.call(checker, false).then(function(data){
+          expect(data).to.be.true;
+        });
+        expect(checker.loadStacksVersions.calledOnce).to.be.true;
       });
     });
 

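The test refactor follows the controller change: loadStacks() now resolves the step's deferred with the stacksLoaded flag via done(), instead of dropping it in always() and padding the next step with a one-second Em.run.later delay. The promise-chaining idea, sketched with Python futures (toy stand-ins, not the Ember API):

  from concurrent.futures import Future

  def load_stacks():
      f = Future()
      f.set_result(True)   # pretend the stacks request completed
      return f

  def step_callback():
      dfd = Future()
      # propagate the stacksLoaded flag so the next wizard step can
      # skip loadStacksVersions when the data is already there
      load_stacks().add_done_callback(
          lambda f: dfd.set_result(f.result()))
      return dfd

  assert step_callback().result() is True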

[32/50] [abbrv] ambari git commit: AMBARI-22089. Hive View 2.0 - Unable to update existing saved queries, view creates new records under saved queries tab (pallavkul)

Posted by rl...@apache.org.
AMBARI-22089. Hive View 2.0 - Unable to update existing saved queries, view creates new records under saved queries tab (pallavkul)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d3b67eea
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d3b67eea
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d3b67eea

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: d3b67eeab455b01f8f921039b50818a6ded32839
Parents: f5f888c
Author: pallavkul <pa...@gmail.com>
Authored: Fri Sep 29 20:50:43 2017 +0530
Committer: pallavkul <pa...@gmail.com>
Committed: Fri Sep 29 20:50:43 2017 +0530

----------------------------------------------------------------------
 .../savedQueries/SavedQueryResourceManager.java | 17 ++++--
 .../resources/ui/app/routes/queries/query.js    | 63 +++++++++++++++-----
 .../hive20/src/main/resources/ui/yarn.lock      |  2 +-
 3 files changed, 61 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d3b67eea/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceManager.java b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceManager.java
index 3690683..fff202c 100644
--- a/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceManager.java
+++ b/contrib/views/hive20/src/main/java/org/apache/ambari/view/hive20/resources/savedQueries/SavedQueryResourceManager.java
@@ -115,12 +115,17 @@ public class SavedQueryResourceManager extends PersonalCRUDResourceManager<Saved
   }
 
   @Override
-  public SavedQuery update(SavedQuery newObject, String id) throws ItemNotFound {
-    SavedQuery savedQuery = super.update(newObject, id);
-    // Emptying short query so that in next read, this gets updated with proper value
-    // from the queryFile
-    emptyShortQueryField(savedQuery);
-    return savedQuery;
+  public SavedQuery update(SavedQuery object, String id) throws ItemNotFound {
+    String query = object.getShortQuery();
+    object.setShortQuery(makeShortQuery(query));
+    object = super.update(object, id);
+    try {
+      createDefaultQueryFile(object, query);
+
+    } catch (ServiceFormattedException e) {
+      cleanupAfterErrorAndThrowAgain(object, e);
+    }
+    return object;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3b67eea/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index f6256898..291c335 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -584,44 +584,79 @@ export default Ember.Route.extend(UILoggerMixin, {
       let owner = this.get('controller.model').get('owner');
       let queryFile = this.get('controller.model').get('queryFile');
       let logFile = this.get('controller.model').get('logFile');
+      let shortQuery = (currentQuery.length > 0) ? currentQuery : ";";
+      let savedQueryId = this.get('controller.model').get('id');
 
-      let payload = {"title" : newTitle,
-        "dataBase": selectedDb,
-        "owner" : owner,
-        "shortQuery" : (currentQuery.length > 0) ? currentQuery : ";",
-        "queryFile" : queryFile,
-        "logFile" : logFile};
+
+      this.store.findAll('savedQuery').then(savedQueries => {
+        return savedQueries.toArray();
+      }).then((existingSavedQueries) =>{
+
+        var queryExist = existingSavedQueries.filterBy('id', savedQueryId).get('firstObject');
+
+        if(queryExist){
+          this.send('updateSavedQuery',  queryExist.get('id'));
+        } else{
+          this.send('addSavedQuery', selectedDb, newTitle, owner, shortQuery );
+        }
+
+
+      });
+
+    },
+
+    addSavedQuery(selectedDb, newTitle, owner, shortQuery){
 
       let newSaveQuery = this.get('store').createRecord('saved-query',
         { dataBase:selectedDb,
           title:newTitle,
-          queryFile: queryFile,
           owner: owner,
-          shortQuery: (currentQuery.length > 0) ? currentQuery : ";"
+          shortQuery: shortQuery
         });
 
-
       newSaveQuery.save().then((data) => {
-        console.log('saved query saved');
-
         this.get('controller.model').set('title', newTitle);
         this.get('controller.model').set('isQueryDirty', false);
         this.get('controller').set('worksheetModalSuccess', true);
-
         Ember.run.later(() => {
           this.get('controller').set('showWorksheetModal', false);
           this.closeWorksheetAfterSave();
         }, 2 * 1000);
-
       });
 
     },
 
+    updateSavedQuery(savedQueryId){
+          let currentQuery = this.get('controller.model').get('query');
+          let selectedDb = this.get('controller.model').get('selectedDb');
+          let owner = this.get('controller.model').get('owner');
+
+          this.get('store').findRecord('saved-query', savedQueryId ).then(savedQuery => {
+            savedQuery.set('shortQuery', (currentQuery.length > 0) ? currentQuery : ";");
+            savedQuery.set('dataBase', selectedDb );
+            savedQuery.set('owner', owner );
+
+            savedQuery.save().then(savedQuery => {
+
+                this.get('controller.model').set('isQueryDirty', false);
+                this.get('controller').set('worksheetModalSuccess', true);
+
+                Ember.run.later(() => {
+                  this.get('controller').set('showWorksheetModal', false);
+                  this.closeWorksheetAfterSave();
+                }, 2 * 1000);
+            })
+
+          });
+
+
+    },
+
     closeWorksheetModal(){
       this.get('controller').set('showWorksheetModal', false);
       this.closeWorksheetAfterSave();
       this.get('controller.model').set('tabDataToClose', null);
-  },
+    },
 
     expandQueryEdidorPanel(){
       if(!this.get('isQueryEdidorPaneExpanded')){

http://git-wip-us.apache.org/repos/asf/ambari/blob/d3b67eea/contrib/views/hive20/src/main/resources/ui/yarn.lock
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/yarn.lock b/contrib/views/hive20/src/main/resources/ui/yarn.lock
index 477a15c..607cf81 100644
--- a/contrib/views/hive20/src/main/resources/ui/yarn.lock
+++ b/contrib/views/hive20/src/main/resources/ui/yarn.lock
@@ -569,7 +569,7 @@ babel-plugin-transform-es2015-block-scoped-functions@^6.22.0:
   dependencies:
     babel-runtime "^6.22.0"
 
-babel-plugin-transform-es2015-block-scoping@^6.23.0:
+babel-plugin-transform-es2015-block-scoping@^6.23.0, babel-plugin-transform-es2015-block-scoping@^6.24.1:
   version "6.24.1"
   resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576"
   dependencies:
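
The duplicate records came from the save action unconditionally creating a new saved-query record. The route now looks the worksheet's id up among the existing saved queries first and dispatches to updateSavedQuery or addSavedQuery accordingly, with the server side rewriting the query file on update. The decision reduces to this shape (Python sketch with a toy in-memory store, not the Ember Data API):

  class Store:
      def __init__(self):
          self.records, self.next_id = {}, 1
      def find(self, rid):
          return self.records.get(rid)
      def create(self, data):
          rid, self.next_id = self.next_id, self.next_id + 1
          self.records[rid] = dict(data, id=rid)
          return rid
      def update(self, rid, data):
          self.records[rid].update(data)

  def save_worksheet(store, worksheet):
      if store.find(worksheet.get("id")) is not None:
          # update in place: no duplicate saved-query record
          store.update(worksheet["id"],
                       {"shortQuery": worksheet["query"] or ";"})
      else:
          worksheet["id"] = store.create(
              {"title": worksheet["title"],
               "shortQuery": worksheet["query"] or ";"})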


[28/50] [abbrv] ambari git commit: AMBARI-22093 Log Search UI: implement service logs actions functionality. (ababiichuk)

Posted by rl...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts
index b6ec8d7..f043f42 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/component-generator.service.spec.ts
@@ -25,8 +25,10 @@ import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/aud
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
 import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
+import {AppStateService, appState} from '@app/services/storage/app-state.service';
 import {ClustersService, clusters} from '@app/services/storage/clusters.service';
 import {ComponentsService, components} from '@app/services/storage/components.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {LogsContainerService} from '@app/services/logs-container.service';
 import {HttpClientService} from '@app/services/http-client.service';
 import {FilteringService} from '@app/services/filtering.service';
@@ -53,8 +55,10 @@ describe('ComponentGeneratorService', () => {
           serviceLogsFields,
           serviceLogsHistogramData,
           appSettings,
+          appState,
           clusters,
-          components
+          components,
+          serviceLogsTruncated
         })
       ],
       providers: [
@@ -72,8 +76,10 @@ describe('ComponentGeneratorService', () => {
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
         AppSettingsService,
+        AppStateService,
         ClustersService,
-        ComponentsService
+        ComponentsService,
+        ServiceLogsTruncatedService
       ]
     });
   });

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.ts
index 8fed570..44a5f6a 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/http-client.service.ts
@@ -23,6 +23,7 @@ import {Http, XHRBackend, Request, RequestOptions, RequestOptionsArgs, Response,
 import {AuditLogsQueryParams} from '@app/classes/queries/audit-logs-query-params.class';
 import {ServiceLogsQueryParams} from '@app/classes/queries/service-logs-query-params.class';
 import {ServiceLogsHistogramQueryParams} from '@app/classes/queries/service-logs-histogram-query-params.class';
+import {ServiceLogsTruncatedQueryParams} from '@app/classes/queries/service-logs-truncated-query-params.class';
 import {AppStateService} from '@app/services/storage/app-state.service';
 
 @Injectable()
@@ -56,6 +57,10 @@ export class HttpClientService extends Http {
     serviceLogsFields: {
       url: 'service/logs/schema/fields'
     },
+    serviceLogsTruncated: {
+      url: 'service/logs/truncated',
+      params: opts => new ServiceLogsTruncatedQueryParams(opts)
+    },
     components: {
       url: 'service/logs/components'
     },
@@ -88,7 +93,7 @@ export class HttpClientService extends Http {
     const preset = this.endPoints[url],
       rawParams = preset && preset.params ? preset.params(params) : params;
     if (rawParams) {
-      const paramsString = Object.keys(rawParams).map(key => `${key}=${rawParams[key]}`).join('&'),
+      const paramsString = Object.keys(rawParams).map((key: string): string => `${key}=${rawParams[key]}`).join('&'),
         urlParams = new URLSearchParams(paramsString, {
           encodeKey: key => key,
           encodeValue: value => encodeURIComponent(value)

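The map() callback above only gained explicit typings, but the surrounding logic is worth spelling out: the endpoint's params object is flattened into a key=value query string, with keys passed through untouched and values percent-encoded (the encodeKey/encodeValue options). The equivalent in Python:

  from urllib.parse import quote

  def build_query(params):
      # keys pass through as-is; values are percent-encoded,
      # mirroring encodeKey/encodeValue in the TypeScript above
      return "&".join("{0}={1}".format(k, quote(str(v)))
                      for k, v in params.items())

  print(build_query({"clusters": "c1", "level": "ERROR,FATAL"}))
  # clusters=c1&level=ERROR%2CFATAL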
http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts
index 8ebbd72..a762fef 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.spec.ts
@@ -24,9 +24,11 @@ import {AuditLogsFieldsService, auditLogsFields} from '@app/services/storage/aud
 import {ServiceLogsFieldsService, serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService, serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
 import {AppSettingsService, appSettings} from '@app/services/storage/app-settings.service';
+import {AppStateService, appState} from '@app/services/storage/app-state.service';
 import {ClustersService, clusters} from '@app/services/storage/clusters.service';
 import {ComponentsService, components} from '@app/services/storage/components.service';
 import {HostsService, hosts} from '@app/services/storage/hosts.service';
+import {ServiceLogsTruncatedService, serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {HttpClientService} from '@app/services/http-client.service';
 import {FilteringService} from '@app/services/filtering.service';
 
@@ -51,9 +53,11 @@ describe('LogsContainerService', () => {
           serviceLogsFields,
           serviceLogsHistogramData,
           appSettings,
+          appState,
           clusters,
           components,
-          hosts
+          hosts,
+          serviceLogsTruncated
         })
       ],
       providers: [
@@ -63,9 +67,11 @@ describe('LogsContainerService', () => {
         ServiceLogsFieldsService,
         ServiceLogsHistogramDataService,
         AppSettingsService,
+        AppStateService,
         ClustersService,
         ComponentsService,
         HostsService,
+        ServiceLogsTruncatedService,
         LogsContainerService,
         {
           provide: HttpClientService,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
index bef28cf..14e9ad4 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/logs-container.service.ts
@@ -24,11 +24,24 @@ import {AuditLogsFieldsService} from '@app/services/storage/audit-logs-fields.se
 import {ServiceLogsService} from '@app/services/storage/service-logs.service';
 import {ServiceLogsFieldsService} from '@app/services/storage/service-logs-fields.service';
 import {ServiceLogsHistogramDataService} from '@app/services/storage/service-logs-histogram-data.service';
+import {ServiceLogsTruncatedService} from '@app/services/storage/service-logs-truncated.service';
+import {AppStateService} from '@app/services/storage/app-state.service';
+import {ActiveServiceLogEntry} from '@app/classes/active-service-log-entry.class';
 
 @Injectable()
 export class LogsContainerService {
 
-  constructor(private httpClient: HttpClientService, private auditLogsStorage: AuditLogsService, private auditLogsFieldsStorage: AuditLogsFieldsService, private serviceLogsStorage: ServiceLogsService, private serviceLogsFieldsStorage: ServiceLogsFieldsService, private serviceLogsHistogramStorage: ServiceLogsHistogramDataService, private filtering: FilteringService) {
+  constructor(private httpClient: HttpClientService, private auditLogsStorage: AuditLogsService, private auditLogsFieldsStorage: AuditLogsFieldsService, private serviceLogsStorage: ServiceLogsService, private serviceLogsFieldsStorage: ServiceLogsFieldsService, private serviceLogsHistogramStorage: ServiceLogsHistogramDataService, private serviceLogsTruncatedStorage: ServiceLogsTruncatedService, private appState: AppStateService, private filtering: FilteringService) {
+    appState.getParameter('activeLog').subscribe((value: ActiveServiceLogEntry | null) => this.activeLog = value);
+    appState.getParameter('isServiceLogsFileView').subscribe((value: boolean): void => {
+      const activeLog = this.activeLog,
+        filtersForm = this.filtering.filtersForm;
+      if (value && activeLog) {
+        filtersForm.controls.hosts.setValue(activeLog.host_name);
+        filtersForm.controls.components.setValue(activeLog.component_name);
+      }
+      this.isServiceLogsFileView = value;
+    });
   }
 
   readonly colors = {
@@ -77,26 +90,65 @@ export class LogsContainerService {
 
   totalCount: number = 0;
 
+  isServiceLogsFileView: boolean = false;
+
+  activeLog: ActiveServiceLogEntry | null = null;
+
   loadLogs(logsType: string): void {
     this.httpClient.get(logsType, this.getParams('listFilters')).subscribe(response => {
-      const jsonResponse = response.json();
-      this.logsTypeMap[logsType].logsModel.clear();
+      const jsonResponse = response.json(),
+        model = this.logsTypeMap[logsType].logsModel;
+      model.clear();
       if (jsonResponse) {
         const logs = jsonResponse.logList,
           count = jsonResponse.totalCount || 0;
         if (logs) {
-          this.serviceLogsStorage.addInstances(logs);
+          model.addInstances(logs);
         }
         this.totalCount = count;
       }
     });
-    this.httpClient.get('serviceLogsHistogram', this.getParams('histogramFilters')).subscribe(response => {
+    if (logsType === 'serviceLogs') {
+      // TODO rewrite to implement conditional data loading for service logs histogram or audit logs graph
+      this.httpClient.get('serviceLogsHistogram', this.getParams('histogramFilters')).subscribe(response => {
+        const jsonResponse = response.json();
+        this.serviceLogsHistogramStorage.clear();
+        if (jsonResponse) {
+          const histogramData = jsonResponse.graphData;
+          if (histogramData) {
+            this.serviceLogsHistogramStorage.addInstances(histogramData);
+          }
+        }
+      });
+    }
+  }
+
+  loadLogContext(id: string, hostName: string, componentName: string, scrollType: 'before' | 'after' | '' = ''): void {
+    const params = {
+      id: id,
+      host_name: hostName,
+      component_name: componentName,
+      scrollType: scrollType
+    };
+    this.httpClient.get('serviceLogsTruncated', params).subscribe(response => {
       const jsonResponse = response.json();
-      this.serviceLogsHistogramStorage.clear();
+      if (!scrollType) {
+        this.serviceLogsTruncatedStorage.clear();
+      }
       if (jsonResponse) {
-        const histogramData = jsonResponse.graphData;
-        if (histogramData) {
-          this.serviceLogsHistogramStorage.addInstances(histogramData);
+        const logs = jsonResponse.logList;
+        if (logs) {
+          if (scrollType === 'before') {
+            this.serviceLogsTruncatedStorage.addInstancesToStart(logs);
+          } else {
+            this.serviceLogsTruncatedStorage.addInstances(logs);
+          }
+          if (!scrollType) {
+            this.appState.setParameters({
+              isServiceLogContextView: true,
+              activeLog: params
+            });
+          }
         }
       }
     });

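A hedged usage sketch of the new loadLogContext contract (the caller and argument values below are hypothetical): an empty scrollType resets the serviceLogsTruncated collection and switches the app into context view, 'before' prepends older entries via addInstancesToStart, and 'after' appends.

  // Initial open: clears the truncated-logs collection, loads the context
  // around the entry, and sets isServiceLogContextView/activeLog in app state.
  logsContainer.loadLogContext('log_0', 'c6401.ambari.apache.org', 'hdfs_namenode');

  // "Load more" above the visible window: older entries are prepended.
  logsContainer.loadLogContext('log_0', 'c6401.ambari.apache.org', 'hdfs_namenode', 'before');

  // "Load more" below the visible window: newer entries are appended.
  logsContainer.loadLogContext('log_0', 'c6401.ambari.apache.org', 'hdfs_namenode', 'after');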
http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/reducers.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/reducers.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/reducers.service.ts
index 08f237d..ca8a632 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/reducers.service.ts
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/reducers.service.ts
@@ -27,6 +27,7 @@ import {graphs} from '@app/services/storage/graphs.service';
 import {hosts} from '@app/services/storage/hosts.service';
 import {serviceLogs} from '@app/services/storage/service-logs.service';
 import {serviceLogsHistogramData} from '@app/services/storage/service-logs-histogram-data.service';
+import {serviceLogsTruncated} from '@app/services/storage/service-logs-truncated.service';
 import {serviceLogsFields} from '@app/services/storage/service-logs-fields.service';
 import {auditLogsFields} from '@app/services/storage/audit-logs-fields.service';
 import {userConfigs} from '@app/services/storage/user-configs.service';
@@ -37,6 +38,7 @@ export const reducers = {
   auditLogs,
   serviceLogs,
   serviceLogsHistogramData,
+  serviceLogsTruncated,
   graphs,
   hosts,
   userConfigs,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-truncated.service.ts
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-truncated.service.ts b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-truncated.service.ts
new file mode 100644
index 0000000..f8fe0f7
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-web/src/app/services/storage/service-logs-truncated.service.ts
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import {Injectable} from '@angular/core';
+import {Store} from '@ngrx/store';
+import {AppStore, CollectionModelService, getCollectionReducer} from '@app/models/store.model';
+
+export const modelName = 'serviceLogsTruncated';
+
+@Injectable()
+export class ServiceLogsTruncatedService extends CollectionModelService {
+  constructor(store: Store<AppStore>) {
+    super(modelName, store);
+  }
+}
+
+export const serviceLogsTruncated = getCollectionReducer(modelName);

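The new service only binds the 'serviceLogsTruncated' model name to the shared collection reducer; the API it inherits is the one exercised by loadLogContext above. A minimal hypothetical consumer, for illustration:

  import {Injectable} from '@angular/core';
  import {ServiceLogsTruncatedService} from '@app/services/storage/service-logs-truncated.service';

  @Injectable()
  export class TruncatedLogsExample {
    constructor(private storage: ServiceLogsTruncatedService) {}

    resetWith(logs: any[]): void {
      this.storage.clear();                    // drop the previous context
      this.storage.addInstances(logs);
    }

    prependOlder(logs: any[]): void {
      this.storage.addInstancesToStart(logs);  // keep on-screen order intact
    }
  }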
http://git-wip-us.apache.org/repos/asf/ambari/blob/8852edd2/ambari-logsearch/ambari-logsearch-web/src/assets/i18n/en.json
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-web/src/assets/i18n/en.json b/ambari-logsearch/ambari-logsearch-web/src/assets/i18n/en.json
index 0d40bc7..1dee559 100644
--- a/ambari-logsearch/ambari-logsearch-web/src/assets/i18n/en.json
+++ b/ambari-logsearch/ambari-logsearch-web/src/assets/i18n/en.json
@@ -4,6 +4,7 @@
   "modal.submit": "OK",
   "modal.cancel": "Cancel",
   "modal.apply": "Apply",
+  "modal.close": "Close",
 
   "authorization.name": "Username",
   "authorization.password": "Password",
@@ -141,5 +142,9 @@
   "logs.versionNote": "Version Note",
   "logs.versionNumber": "Version Number",
   "logs.addToQuery": "Add to Query",
-  "logs.excludeFromQuery": "Exclude from Query"
+  "logs.excludeFromQuery": "Exclude from Query",
+  "logs.copy": "Copy",
+  "logs.open": "Open Log",
+  "logs.context": "Context",
+  "logs.loadMore": "Load more"
 }


[02/50] [abbrv] ambari git commit: AMBARI-22055. Make Log Search IT work with the new UI (oleewere)

Posted by rl...@apache.org.
AMBARI-22055. Make Log Search IT work with the new UI (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fe7a8e4b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fe7a8e4b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fe7a8e4b

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: fe7a8e4bb681720021c3d4dc60f02ef422aaf3f7
Parents: 6a99bd8
Author: Oliver Szabo <ol...@gmail.com>
Authored: Mon Sep 25 21:09:43 2017 +0200
Committer: Oliver Szabo <ol...@gmail.com>
Committed: Tue Sep 26 13:51:01 2017 +0200

----------------------------------------------------------------------
 .../java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java | 4 +---
 .../src/test/resources/stories/selenium/login.ui.story           | 4 ++--
 2 files changed, 3 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fe7a8e4b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java
index b40a2bc..1b7c634 100644
--- a/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/java/org/apache/ambari/logsearch/steps/LogSearchUISteps.java
@@ -22,11 +22,9 @@ import junit.framework.Assert;
 import org.apache.ambari.logsearch.domain.StoryDataRegistry;
 import org.apache.ambari.logsearch.web.Home;
 import org.jbehave.core.annotations.AfterScenario;
-import org.jbehave.core.annotations.AfterStories;
 import org.jbehave.core.annotations.AfterStory;
 import org.jbehave.core.annotations.BeforeScenario;
 import org.jbehave.core.annotations.BeforeStories;
-import org.jbehave.core.annotations.BeforeStory;
 import org.jbehave.core.annotations.Given;
 import org.jbehave.core.annotations.Named;
 import org.jbehave.core.annotations.Then;
@@ -92,7 +90,7 @@ public class LogSearchUISteps extends AbstractLogSearchSteps {
     LOG.info("Type password: {}", password);
     home.findElement(By.id("password")).sendKeys(password);
     LOG.info("Click on Sign In button.");
-    home.findElement(By.className("custLogin")).click();
+    home.findElement(By.cssSelector("login-form > div > form > button")).click();
     closeTourPopup();
   }
 

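The fix replaces the stale 'custLogin' class lookup with a structural CSS selector, since the rewritten Angular login form no longer carries that class. For illustration only, the same interaction expressed with the selenium-webdriver Node bindings (TypeScript; the username field id is an assumption, only the password id appears in the step above):

  import {Builder, By} from 'selenium-webdriver';

  async function login(baseUrl: string, user: string, password: string): Promise<void> {
    const driver = await new Builder().forBrowser('chrome').build();
    try {
      await driver.get(baseUrl);
      await driver.findElement(By.id('username')).sendKeys(user);    // id assumed
      await driver.findElement(By.id('password')).sendKeys(password);
      // Locate the sign-in button structurally, as the Java step now does.
      await driver.findElement(By.css('login-form > div > form > button')).click();
    } finally {
      await driver.quit();
    }
  }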
http://git-wip-us.apache.org/repos/asf/ambari/blob/fe7a8e4b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story
index 543c211..4824335 100644
--- a/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story
+++ b/ambari-logsearch/ambari-logsearch-it/src/test/resources/stories/selenium/login.ui.story
@@ -10,11 +10,11 @@ Scenario: login with admin/admin
 Given logsearch docker container
 And open logsearch home page
 When login with admin / admin
-Then page contains text: 'Service Logs'
+Then page contains text: 'Refresh'
 
 Scenario: login with admin and wrong password
 
 Given logsearch docker container
 And open logsearch home page
 When login with admin / wrongpassword
-Then page does not contain text: 'Service Logs'
\ No newline at end of file
+Then page does not contain text: 'Refresh'
\ No newline at end of file


[39/50] [abbrv] ambari git commit: AMBARI-22076 - Use A Scoped Repo When Installing stack-tools (unit test fix) (jonathanhurley)

Posted by rl...@apache.org.
AMBARI-22076 - Use A Scoped Repo When Installing stack-tools (unit test fix) (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d19f0bb7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d19f0bb7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d19f0bb7

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: d19f0bb7661404ccc72099621e9c2d7c97609d2a
Parents: 31344ff
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Sep 29 15:55:26 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Sep 29 15:55:26 2017 -0400

----------------------------------------------------------------------
 .../src/test/python/resource_management/TestPackageResource.py     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d19f0bb7/ambari-agent/src/test/python/resource_management/TestPackageResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestPackageResource.py b/ambari-agent/src/test/python/resource_management/TestPackageResource.py
index 66227c6..49e3adf 100644
--- a/ambari-agent/src/test/python/resource_management/TestPackageResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestPackageResource.py
@@ -186,7 +186,7 @@ class TestPackageResource(TestCase):
               )
     self.assertEquals(shell_mock.call_args[0][0],
                       ['/usr/bin/yum', '-d', '0', '-e', '0', '-y', 'install',
-                       '--disablerepo=',
+                       '--disablerepo=*',
                        '--enablerepo=HDP-UTILS-2.2.0.1-885,HDP-2.2.0.1-885', 'some_package'])
 
   @patch.object(shell, "call", new = MagicMock(return_value=(0, None)))

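The updated expectation matches what the agent actually runs: an empty --disablerepo= pattern disables nothing, whereas --disablerepo=* masks every configured repository before --enablerepo re-enables only the scoped ones, so the resulting command resolves packages solely from the intended repos:

  /usr/bin/yum -d 0 -e 0 -y install --disablerepo=* --enablerepo=HDP-UTILS-2.2.0.1-885,HDP-2.2.0.1-885 some_package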

[11/50] [abbrv] ambari git commit: AMBARI-22059. Use symlinks for Infra components (oleewere)

Posted by rl...@apache.org.
AMBARI-22059. Use symlinks for Infra components (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6fb05cb0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6fb05cb0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6fb05cb0

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 6fb05cb0f089ef0fee7fcc462efda967b814b668
Parents: 9d802b7
Author: Oliver Szabo <ol...@gmail.com>
Authored: Tue Sep 26 13:49:16 2017 +0200
Committer: Oliver Szabo <ol...@gmail.com>
Committed: Wed Sep 27 11:26:23 2017 +0200

----------------------------------------------------------------------
 ambari-infra/ambari-infra-assembly/pom.xml      |  8 +++++++
 .../src/main/package/deb/manager/postinst       |  5 ++++
 .../src/main/package/deb/solr-client/postinst   |  9 ++++++++
 .../src/main/package/rpm/manager/postinstall.sh | 20 ++++++++++++++++
 .../main/package/rpm/solr-client/postinstall.sh | 24 ++++++++++++++++++++
 .../src/main/resources/infraManager.sh          | 10 +++++++-
 .../src/main/resources/solrIndexHelper.sh       |  5 ++--
 7 files changed, 78 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb05cb0/ambari-infra/ambari-infra-assembly/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-assembly/pom.xml b/ambari-infra/ambari-infra-assembly/pom.xml
index fafef7e..dfb5c01 100644
--- a/ambari-infra/ambari-infra-assembly/pom.xml
+++ b/ambari-infra/ambari-infra-assembly/pom.xml
@@ -110,6 +110,10 @@
                   <name>${solr.client.package.name}</name>
                   <needarch>noarch</needarch>
                   <targetOS>linux</targetOS>
+                  <postinstallScriptlet>
+                    <scriptFile>${project.basedir}/src/main/package/rpm/solr-client/postinstall.sh</scriptFile>
+                    <fileEncoding>utf-8</fileEncoding>
+                  </postinstallScriptlet>
                   <mappings>
                     <mapping>
                       <directory>${solr.client.mapping.path}</directory>
@@ -131,6 +135,10 @@
                 <configuration>
                   <group>Development</group>
                   <name>${infra-manager.package.name}</name>
+                  <postinstallScriptlet>
+                    <scriptFile>${project.basedir}/src/main/package/rpm/manager/postinstall.sh</scriptFile>
+                    <fileEncoding>utf-8</fileEncoding>
+                  </postinstallScriptlet>
                   <mappings>
                     <mapping>
                       <directory>${infra-manager.mapping.path}</directory>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb05cb0/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst
index 21a01fa..c8d6ecd 100644
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/deb/manager/postinst
@@ -13,3 +13,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License
+
+INFRA_MANAGER_LINK_NAME="/usr/bin/infra-manager"
+INFRA_MANAGER_SOURCE="/usr/lib/ambari-infra-manager/infraManager.sh"
+
+rm -f $INFRA_MANAGER_LINK_NAME ; ln -s $INFRA_MANAGER_SOURCE $INFRA_MANAGER_LINK_NAME

http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb05cb0/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
index 21a01fa..c339cd0 100644
--- a/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/deb/solr-client/postinst
@@ -13,3 +13,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License
+
+SOLR_CLOUD_CLI_LINK_NAME="/usr/bin/infra-solr-cloud-cli"
+SOLR_CLOUD_CLI_SOURCE="/usr/lib/ambari-infra-solr-client/solrCloudCli.sh"
+
+SOLR_INDEX_TOOL_LINK_NAME="/usr/bin/infra-lucene-index-tool"
+SOLR_INDEX_TOOL_SOURCE="/usr/lib/ambari-infra-solr-client/solrIndexHelper.sh"
+
+rm -f $SOLR_CLOUD_CLI_LINK_NAME ; ln -s $SOLR_CLOUD_CLI_SOURCE $SOLR_CLOUD_CLI_LINK_NAME
+rm -f $SOLR_INDEX_TOOL_LINK_NAME ; ln -s $SOLR_INDEX_TOOL_SOURCE $SOLR_INDEX_TOOL_LINK_NAME
\ No newline at end of file

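The rm -f / ln -s pair used by all four deb and rpm scriptlets in this commit (roughly equivalent to ln -sfn) keeps the postinstall idempotent: a reinstall or upgrade replaces a stale link instead of failing with "File exists".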
http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb05cb0/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postinstall.sh
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postinstall.sh b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postinstall.sh
new file mode 100644
index 0000000..48a3998
--- /dev/null
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/manager/postinstall.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+INFRA_MANAGER_LINK_NAME="/usr/bin/infra-manager"
+INFRA_MANAGER_SOURCE="/usr/lib/ambari-infra-manager/infraManager.sh"
+
+rm -f $INFRA_MANAGER_LINK_NAME ; ln -s $INFRA_MANAGER_SOURCE $INFRA_MANAGER_LINK_NAME
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb05cb0/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh
new file mode 100644
index 0000000..c339cd0
--- /dev/null
+++ b/ambari-infra/ambari-infra-assembly/src/main/package/rpm/solr-client/postinstall.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+SOLR_CLOUD_CLI_LINK_NAME="/usr/bin/infra-solr-cloud-cli"
+SOLR_CLOUD_CLI_SOURCE="/usr/lib/ambari-infra-solr-client/solrCloudCli.sh"
+
+SOLR_INDEX_TOOL_LINK_NAME="/usr/bin/infra-lucene-index-tool"
+SOLR_INDEX_TOOL_SOURCE="/usr/lib/ambari-infra-solr-client/solrIndexHelper.sh"
+
+rm -f $SOLR_CLOUD_CLI_LINK_NAME ; ln -s $SOLR_CLOUD_CLI_SOURCE $SOLR_CLOUD_CLI_LINK_NAME
+rm -f $SOLR_INDEX_TOOL_LINK_NAME ; ln -s $SOLR_INDEX_TOOL_SOURCE $SOLR_INDEX_TOOL_LINK_NAME
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb05cb0/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh b/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
index 65287b2..bbf03df 100644
--- a/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
+++ b/ambari-infra/ambari-infra-manager/src/main/resources/infraManager.sh
@@ -14,7 +14,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+: ${JAVA_HOME:?"Please set the JAVA_HOME variable!"}
+
 JVM="java"
 sdir="`dirname \"$0\"`"
+ldir="`dirname "$(readlink -f "$0")"`"
+
+DIR="$sdir"
+if [ "$sdir" != "$ldir" ]; then
+  DIR="$ldir"
+fi
 
-PATH=$JAVA_HOME/bin:$PATH nohup $JVM -classpath "/etc/ambari-infra-manager/conf:$sdir:$sdir/libs/*" $INFRA_MANAGER_OPTS org.apache.ambari.infra.InfraManager ${1+"$@"} &
\ No newline at end of file
+PATH=$JAVA_HOME/bin:$PATH nohup $JVM -classpath "/etc/ambari-infra-manager/conf:$DIR:$DIR/libs/*" $INFRA_MANAGER_OPTS org.apache.ambari.infra.InfraManager ${1+"$@"} &
\ No newline at end of file

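A note on the $sdir/$ldir comparison above: once /usr/bin/infra-manager is installed as a symlink, dirname "$0" resolves to /usr/bin, where neither the configuration nor the libs/* jars live. readlink -f follows the link to the physical script location, so the classpath is built from the real install directory in both invocation styles. The new JAVA_HOME guard simply fails fast with a clear message instead of launching with whatever bare "java" happens to be on PATH.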
http://git-wip-us.apache.org/repos/asf/ambari/blob/6fb05cb0/ambari-infra/ambari-infra-solr-client/src/main/resources/solrIndexHelper.sh
----------------------------------------------------------------------
diff --git a/ambari-infra/ambari-infra-solr-client/src/main/resources/solrIndexHelper.sh b/ambari-infra/ambari-infra-solr-client/src/main/resources/solrIndexHelper.sh
index 1d219d6..6e817ec 100755
--- a/ambari-infra/ambari-infra-solr-client/src/main/resources/solrIndexHelper.sh
+++ b/ambari-infra/ambari-infra-solr-client/src/main/resources/solrIndexHelper.sh
@@ -14,6 +14,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+: ${JAVA_HOME:?"Please set the JAVA_HOME variable!"}
+
 JVM="java"
 sdir="`dirname \"$0\"`"
 ldir="`dirname "$(readlink -f "$0")"`"
@@ -22,12 +24,11 @@ DIR="$sdir"
 if [ "$sdir" != "$ldir" ]; then
   DIR="$ldir"
 fi
-: ${JAVA_HOME:?"Please set the JAVA_HOME for lucene index migration!"}
 
 function print_help() {
   cat << EOF
 
-   Usage: solrIndexHelper.sh [<command>] [<arguments with flags>]
+   Usage: [<command>] [<arguments with flags>]
 
    commands:
      upgrade-index            Check and upgrade solr index data in core directories.


[41/50] [abbrv] ambari git commit: AMBARI-22091. Hive View 2.0 Jobs worksheets don't have hyperlinks, whereas View 1.5 History worksheets had hyperlinks pointing to fetched results (Venkata Sairam)

Posted by rl...@apache.org.
AMBARI-22091. Hive View 2.0 Jobs worksheets don't have hyperlinks, whereas View 1.5 History worksheets had hyperlinks pointing to fetched results (Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d4378aac
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d4378aac
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d4378aac

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: d4378aacbd5bd95c0367f5b8a6865ee3bbe6de04
Parents: b84cbbe
Author: Venkata Sairam <ve...@gmail.com>
Authored: Sat Sep 30 09:05:33 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Sat Sep 30 09:05:59 2017 +0530

----------------------------------------------------------------------
 .../resources/ui/app/components/job-item.js     |  49 +++++-
 .../resources/ui/app/components/jobs-browser.js |   3 +
 .../src/main/resources/ui/app/routes/jobs.js    |  11 ++
 .../main/resources/ui/app/routes/queries/new.js |   2 +
 .../resources/ui/app/routes/queries/query.js    | 164 ++++++++++++-------
 .../src/main/resources/ui/app/styles/app.scss   |   8 +
 .../ui/app/templates/components/job-item.hbs    |   2 +-
 .../app/templates/components/jobs-browser.hbs   |   2 +-
 .../main/resources/ui/app/templates/jobs.hbs    |   1 +
 9 files changed, 177 insertions(+), 65 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js
index 42a3411..0545624 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/job-item.js
@@ -17,12 +17,17 @@
  */
 
 import Ember from 'ember';
+import UILoggerMixin from '../mixins/ui-logger';
 
-export default Ember.Component.extend({
+export default Ember.Component.extend(UILoggerMixin, {
   jobs: Ember.inject.service(),
   tagName: '',
   expanded: false,
   expandedValue: null,
+  store: Ember.inject.service(),
+  savedQueries: Ember.inject.service(),
+
+
   actions: {
     toggleExpandJob(jobId) {
       if(this.get('expanded')) {
@@ -39,6 +44,48 @@ export default Ember.Component.extend({
         });
       }
 
+    },
+    openAsWorksheet(savedQuery){
+
+      let hasWorksheetModel = this.get('model'), self = this;
+      let worksheetId;
+
+      if (Ember.isEmpty(hasWorksheetModel)){
+        worksheetId = 1;
+      }else {
+
+        let isWorksheetExist = (this.get('model').filterBy('title', savedQuery.title).get('length') > 0);
+        if(isWorksheetExist) {
+          this.sendAction('openWorksheet', savedQuery, true);
+          return;
+        }
+
+        let worksheets = this.get('model');
+        worksheets.forEach((worksheet) => {
+          worksheet.set('selected', false);
+      });
+        worksheetId = `worksheet${worksheets.get('length') + 1}`;
+      }
+      var isTabExisting = this.get("store").peekRecord('worksheet', savedQuery.id);
+      if(isTabExisting) {
+        self.sendAction('openWorksheet', savedQuery, true);
+        return;
+      }
+      this.get("savedQueries").fetchSavedQuery(savedQuery.get('queryFile')).then(function(response) {
+        let localWs = {
+          id: savedQuery.get('id'),
+          title: savedQuery.get('title'),
+          queryFile: savedQuery.get('queryFile'),
+          query: response.file.fileContent,
+          selectedDb : savedQuery.get('dataBase'),
+          owner: savedQuery.get('owner'),
+          selected: true
+        };
+        self.sendAction('openWorksheet', localWs);
+      }, (error) => {
+        self.get('logger').danger('Failed to load the query', self.extractError(error));
+    });
+
     }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js
index 7e24843..ae081de 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/components/jobs-browser.js
@@ -93,6 +93,9 @@ export default Ember.Component.extend({
 
     clearTitleFilter() {
       this.set('titleFilter');
+    },
+    openWorksheet(worksheet, isExisitingWorksheet){
+      this.sendAction("openWorksheet", worksheet, isExisitingWorksheet);
     }
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js b/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
index 3e88ac6..1b6351f 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/jobs.js
@@ -20,6 +20,7 @@ import Ember from 'ember';
 
 export default Ember.Route.extend({
   moment: Ember.inject.service(),
+  query: Ember.inject.service(),
   timeInitializedTo: null,
   queryParams: {
     startTime: {
@@ -63,6 +64,16 @@ export default Ember.Route.extend({
       this.controller.set('startTime', this.get('moment').moment(startTime, 'YYYY-MM-DD').startOf('day').valueOf());
       this.controller.set('endTime', this.get('moment').moment(endTime, 'YYYY-MM-DD').endOf('day').valueOf());
       this.refresh();
+    },
+    openWorksheet(worksheet, isExisitingWorksheet) {
+      if(isExisitingWorksheet) {
+       this.transitionTo('queries.query', worksheet.id);
+       return;
+      }
+      this.get("store").createRecord('worksheet', worksheet );
+      this.controllerFor('queries').set('worksheets', this.store.peekAll('worksheet'));
+      this.transitionTo('queries.query', worksheet.id);
+      this.controllerFor("queries.query").set('previewJobData', {id:worksheet.id, title:worksheet.title.toLowerCase()});
     }
   }
 

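Taken together, these hunks wire a click on a job title into a worksheet round-trip: the job-item template (further below) fires openAsWorksheet, the component either re-opens an already-open worksheet or fetches the saved query file and builds a local worksheet object, sendAction bubbles openWorksheet through jobs-browser up to this route, and the route creates the worksheet record, transitions to queries.query, and seeds previewJobData so the query route's setupController can re-fetch and display the job's results.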
http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
index 76a7439..1c4a7ba 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/new.js
@@ -43,5 +43,7 @@ export default Ember.Route.extend({
     existingWorksheets.setEach('selected', false);
     this.controllerFor('queries').set('worksheets', this.store.peekAll('worksheet'));
     this.transitionTo('queries.query', newWorksheetTitle);
+    this.controllerFor("queries.query").set('previewJobData', null);
+
   }
 });

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
index 291c335..01e1497 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
+++ b/contrib/views/hive20/src/main/resources/ui/app/routes/queries/query.js
@@ -53,8 +53,10 @@ export default Ember.Route.extend(UILoggerMixin, {
     console.log('lastResultRoute:: ', model.get('lastResultRoute'));
     let lastResultRoute = model.get('lastResultRoute');
 
+
+
     if(Ember.isEmpty(lastResultRoute)){
-      if(model.get('jobData').length > 0){
+      if(model.get('jobData') && model.get('jobData').length > 0){
         this.transitionTo('queries.query.results');
       } else {
         this.transitionTo('queries.query');
@@ -145,6 +147,22 @@ export default Ember.Route.extend(UILoggerMixin, {
     controller.set('worksheetModalSuccess',false);
     controller.set('worksheetModalFail',false);
     controller.set('tabs', tabs);
+    let previewJobData = this.get('controller').get('previewJobData'), ctrlr = this.get('controller'), ctrlrModel = this.get('controller.model');
+
+
+    if(previewJobData) {
+
+      ctrlrModel.set('isJobSuccess', true);
+      ctrlrModel.set('isJobCancelled', false);
+      ctrlrModel.set('isJobCreated', false);
+      ctrlr.set('isJobSuccess', true);
+      ctrlr.set('isJobCancelled', false);
+      ctrlr.set('isJobCreated', false);
+      ctrlrModel.set('currentJobId', previewJobData.id);
+      this.get('controller.model').set('currentJobData', {job: previewJobData});
+
+      this.getJobResult({job: previewJobData}, previewJobData.title, Ember.Object.create({name: 'query'}), ctrlrModel, true);
+    }
   },
   checkIfDeafultDatabaseExists(alldatabases){
     if(this.get('controller.model').get('selectedDb')) {
@@ -172,6 +190,75 @@ export default Ember.Route.extend(UILoggerMixin, {
       this.send('closeWorksheet', tabDataToClose.index, tabDataToClose.id);
     }
   },
+
+  getJobResult(data, payloadTitle, jobDetails, ctrlrModel, isDataPreview){
+    let self = this;
+
+    let isVisualExplainQuery = this.get('controller').get('isVisualExplainQuery');
+
+    let jobId = data.job.id;
+
+    let currentPage = this.get('controller.model').get('currentPage');
+    let previousPage = this.get('controller.model').get('previousPage');
+    let nextPage = this.get('controller.model').get('nextPage');
+
+    this.get('query').getJob(jobId, true).then(function(data) {
+
+      let existingWorksheets = self.get('store').peekAll('worksheet');
+      let myWs = null;
+      if(existingWorksheets.get('length') > 0) {
+        if(isDataPreview) {
+          myWs = existingWorksheets.filterBy('id', jobId).get('firstObject');
+        } else {
+          myWs = existingWorksheets.filterBy('id', payloadTitle).get('firstObject');
+        }
+      }
+      if(!Ember.isBlank(jobDetails.get("dagId"))) {
+        let tezData = self.get("tezViewInfo").getTezViewData();
+        if(tezData && tezData.error) {
+          self.get('controller.model').set('tezError', tezData.errorMsg);
+        } else if(tezData.tezUrl) {
+          self.get('controller.model').set('tezUrl', tezData.tezUrl + jobDetails.get("dagId"));
+        }
+      }
+      myWs.set('queryResult', data);
+      myWs.set('isQueryRunning', false);
+      myWs.set('hasNext', data.hasNext);
+      self.get('controller.model').set('queryResult', data);
+
+
+      let localArr = myWs.get("jobData");
+      localArr.push(data);
+      myWs.set('jobData', localArr);
+      myWs.set('currentPage', currentPage+1);
+      myWs.set('previousPage', previousPage + 1 );
+      myWs.set('nextPage', nextPage + 1);
+
+      if(isVisualExplainQuery){
+        self.send('showVisualExplain', payloadTitle);
+      } else {
+        self.get('controller.model').set('visualExplainJson', null);
+
+        if( self.paramsFor('queries.query').worksheetId && (self.paramsFor('queries.query').worksheetId.toLowerCase() === payloadTitle) || (isDataPreview && self.paramsFor('queries.query').worksheetId && (self.paramsFor('queries.query').worksheetId.toLowerCase() === jobId))){
+          self.transitionTo('queries.query.loading');
+
+          Ember.run.later(() => {
+            self.transitionTo('queries.query.results');
+          }, 1 * 100);
+        }
+      }
+
+    }, function(error) {
+      console.log('error' , error);
+      if(!isDataPreview){
+        self.get('logger').danger('Failed to execute query.', self.extractError(error));
+      } else {
+        self.get('logger').danger('Query expired. Please execute the query again.', self.extractError(error));
+      }
+      self.send('resetDefaultWorksheet', ctrlrModel);
+    });
+  },
+
   actions: {
 
     resetDefaultWorksheet(currModel){
@@ -332,7 +419,7 @@ export default Ember.Route.extend(UILoggerMixin, {
             ctrlr.set('isJobCreated', false);
             let jobDetails = self.store.peekRecord('job', data.job.id);
             console.log(jobDetails);
-            self.send('getJobResult', data, payload.id, jobDetails, ctrlrModel);
+            self.getJobResult(data, payload.id, jobDetails, ctrlrModel);
             self.get('logger').success('Query has been submitted.');
 
           }, (error) => {
@@ -360,64 +447,6 @@ export default Ember.Route.extend(UILoggerMixin, {
         .then( data => this.get('controller').set('isJobCancelled', true));
     },
 
-    getJobResult(data, payloadTitle, jobDetails, ctrlrModel){
-      let self = this;
-
-      let isVisualExplainQuery = this.get('controller').get('isVisualExplainQuery');
-
-      let jobId = data.job.id;
-
-      let currentPage = this.get('controller.model').get('currentPage');
-      let previousPage = this.get('controller.model').get('previousPage');
-      let nextPage = this.get('controller.model').get('nextPage');
-
-      this.get('query').getJob(jobId, true).then(function(data) {
-
-        let existingWorksheets = self.get('store').peekAll('worksheet');
-        let myWs = null;
-        if(existingWorksheets.get('length') > 0) {
-          myWs = existingWorksheets.filterBy('id', payloadTitle).get('firstObject');
-        }
-        if(!Ember.isBlank(jobDetails.get("dagId"))) {
-          let tezData = self.get("tezViewInfo").getTezViewData();
-          if(tezData && tezData.error) {
-            self.get('controller.model').set('tezError', tezData.errorMsg);
-          } else if(tezData.tezUrl) {
-            self.get('controller.model').set('tezUrl', tezData.tezUrl + jobDetails.get("dagId"));
-          }
-        }
-        myWs.set('queryResult', data);
-        myWs.set('isQueryRunning', false);
-        myWs.set('hasNext', data.hasNext);
-
-        let localArr = myWs.get("jobData");
-        localArr.push(data);
-        myWs.set('jobData', localArr);
-        myWs.set('currentPage', currentPage+1);
-        myWs.set('previousPage', previousPage + 1 );
-        myWs.set('nextPage', nextPage + 1);
-
-        if(isVisualExplainQuery){
-          self.send('showVisualExplain', payloadTitle);
-        } else {
-          self.get('controller.model').set('visualExplainJson', null);
-
-          if( self.paramsFor('queries.query').worksheetId && (self.paramsFor('queries.query').worksheetId.toLowerCase() === payloadTitle)){
-            self.transitionTo('queries.query.loading');
-
-            Ember.run.later(() => {
-              self.transitionTo('queries.query.results');
-            }, 1 * 100);
-          }
-        }
-
-      }, function(error) {
-        console.log('error' , error);
-        self.get('logger').danger('Failed to execute query.', self.extractError(error));
-        self.send('resetDefaultWorksheet', ctrlrModel);
-      });
-    },
-
     showVisualExplain(payloadTitle){
        if( this.paramsFor('queries.query').worksheetId && this.paramsFor('queries.query').worksheetId.toLowerCase() === payloadTitle){
          this.transitionTo('queries.query.loading');
@@ -468,7 +497,12 @@ export default Ember.Route.extend(UILoggerMixin, {
       let existingWorksheets = this.get('store').peekAll('worksheet');
       let myWs = null;
       if(existingWorksheets.get('length') > 0) {
-        myWs = existingWorksheets.filterBy('id', payloadTitle.toLowerCase()).get('firstObject');
+        var previewJobData = this.get('controller').get('previewJobData');
+        if(previewJobData) {
+          myWs = existingWorksheets.filterBy('id', previewJobData.id).get('firstObject');
+        } else {
+          myWs = existingWorksheets.filterBy('id', payloadTitle.toLowerCase()).get('firstObject');
+        }
       }
 
       this.transitionTo('queries.query.loading');
@@ -497,7 +531,13 @@ export default Ember.Route.extend(UILoggerMixin, {
       let existingWorksheets = this.get('store').peekAll('worksheet');
       let myWs = null;
       if(existingWorksheets.get('length') > 0) {
-        myWs = existingWorksheets.filterBy('id', payloadTitle.toLowerCase()).get('firstObject');
+        var previewJobData = this.get('controller').get('previewJobData');
+        if(previewJobData) {
+          myWs = existingWorksheets.filterBy('id', previewJobData.id).get('firstObject');
+        } else {
+          myWs = existingWorksheets.filterBy('id', payloadTitle.toLowerCase()).get('firstObject');
+        }
+
       }
 
       this.transitionTo('queries.query.loading');

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
index 522b2d3..d7f14b0 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
+++ b/contrib/views/hive20/src/main/resources/ui/app/styles/app.scss
@@ -1014,3 +1014,11 @@ rect.operator__box {
 .sort-icon {
   padding-left: 5px;
 }
+
+.jobs-table .table-striped td{
+  border: none;
+}
+
+.workflow-name {
+  cursor: pointer;
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/templates/components/job-item.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/job-item.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/job-item.hbs
index 16caad0..6ebdae0 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/job-item.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/job-item.hbs
@@ -18,7 +18,7 @@
 
 <tr>
   <td>{{job.id}}</td>
-  <td class="break-word">{{job.title}}</td>
+  <td class="break-word workflow-name"><a {{action "openAsWorksheet" job}}>{{job.title}}</a></td>
   <td>{{job.status}}</td>
   <td>{{moment-from-now job.dateSubmitted}}</td>
   <td>{{job.duration}}</td>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/templates/components/jobs-browser.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/components/jobs-browser.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/components/jobs-browser.hbs
index d29ca73..7ba3f22 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/components/jobs-browser.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/components/jobs-browser.hbs
@@ -94,7 +94,7 @@
       </thead>
       <tbody>
         {{#each filteredJobsSorted as |job| }}
-          {{job-item job=job}}
+          {{job-item model=jobs job=job openWorksheet="openWorksheet"}}
         {{/each}}
       </tbody>
     </table>

http://git-wip-us.apache.org/repos/asf/ambari/blob/d4378aac/contrib/views/hive20/src/main/resources/ui/app/templates/jobs.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hive20/src/main/resources/ui/app/templates/jobs.hbs b/contrib/views/hive20/src/main/resources/ui/app/templates/jobs.hbs
index 4382b7a..ba8fccc 100644
--- a/contrib/views/hive20/src/main/resources/ui/app/templates/jobs.hbs
+++ b/contrib/views/hive20/src/main/resources/ui/app/templates/jobs.hbs
@@ -20,4 +20,5 @@
                startTime=startTimeText
                endTime=endTimeText
                filterChanged="dateFilterChanged"
+               openWorksheet="openWorksheet"
 }}


[40/50] [abbrv] ambari git commit: AMBARI-22083 - Wrong Hadoop Home Directory Is Being Picked Up on MAINT/PATCH Upgraded Clusters (part2) (jonathanhurley)

Posted by rl...@apache.org.
AMBARI-22083 - Wrong Hadoop Home Directory Is Being Picked Up on MAINT/PATCH Upgraded Clusters (part2) (jonathanhurley)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b84cbbe7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b84cbbe7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b84cbbe7

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: b84cbbe7e9a3afffab07da534541b4eb6076d50c
Parents: d19f0bb
Author: Jonathan Hurley <jh...@hortonworks.com>
Authored: Fri Sep 29 16:01:56 2017 -0400
Committer: Jonathan Hurley <jh...@hortonworks.com>
Committed: Fri Sep 29 16:01:56 2017 -0400

----------------------------------------------------------------------
 .../stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py       | 2 +-
 .../stacks/HDP/3.0/hooks/before-ANY/scripts/params.py          | 6 +++---
 .../stacks/HDP/3.0/hooks/before-START/scripts/params.py        | 2 +-
 3 files changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/b84cbbe7/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
index ddc6100..34dfe70 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
@@ -101,7 +101,7 @@ namenode_host = default("/clusterHostInfo/namenode_host", [])
 has_namenode = not len(namenode_host) == 0
 
 if has_namenode or dfs_type == 'HCFS':
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 link_configs_lock_file = get_config_lock_file()
 stack_select_lock_file = os.path.join(tmp_dir, "stack_select_lock_file")

http://git-wip-us.apache.org/repos/asf/ambari/blob/b84cbbe7/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
index b111c02..cee0519 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-ANY/scripts/params.py
@@ -107,8 +107,8 @@ mapreduce_libs_path = format("{stack_root}/current/hadoop-mapreduce-client/*")
 # which would cause a lot of problems when writing out hadoop-env.sh; instead
 # force the use of "current" in the hook
 hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
-hadoop_home = stack_select.get_hadoop_dir("home", force_latest_on_upgrade=True)
-hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec", force_latest_on_upgrade=True)
+hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
 
 hadoop_conf_empty_dir = None
 hadoop_secure_dn_user = hdfs_user
@@ -210,7 +210,7 @@ if dfs_ha_namenode_ids:
 
 
 if has_namenode or dfs_type == 'HCFS':
-    hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+    hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
     hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
 
 hbase_tmp_dir = "/tmp/hbase-hbase"

http://git-wip-us.apache.org/repos/asf/ambari/blob/b84cbbe7/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-START/scripts/params.py
index d70030d..8555fea 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/before-START/scripts/params.py
@@ -176,7 +176,7 @@ if has_zk_host:
 
 if has_namenode or dfs_type == 'HCFS':
   hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
-  hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
   task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
 
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']

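The effect is the same in all three hooks: without force_latest_on_upgrade, conf_select and stack_select resolve the Hadoop conf, home, and libexec directories from the stack version the component is actually pointed at, rather than forcing the newest installed version, which is what caused the wrong Hadoop home to be picked up on MAINT/PATCH-upgraded clusters.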

[08/50] [abbrv] ambari git commit: AMBARI-22062. Stack selection page does not load the HDP stacks (Redux) (ncole)

Posted by rl...@apache.org.
AMBARI-22062. Stack selection page does not load the HDP stacks (Redux) (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/dc419b4a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/dc419b4a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/dc419b4a

Branch: refs/heads/branch-feature-AMBARI-20859
Commit: dc419b4a3940d514d714b27c06e641e69b586760
Parents: cb27cee
Author: Nate Cole <nc...@hortonworks.com>
Authored: Tue Sep 26 15:19:57 2017 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Tue Sep 26 15:20:02 2017 -0400

----------------------------------------------------------------------
 .../ambari/server/api/services/AmbariMetaInfo.java   |  2 +-
 .../org/apache/ambari/server/stack/StackContext.java | 15 ++++++++++++---
 2 files changed, 13 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/dc419b4a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
index aad35a9..de84965 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/AmbariMetaInfo.java
@@ -1348,7 +1348,7 @@ public class AmbariMetaInfo {
   /**
    * Ensures that the map of version definition files is populated
    */
-  private void ensureVersionDefinitions() {
+  private synchronized void ensureVersionDefinitions() {
     if (null != versionDefinitions) {
       return;
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/dc419b4a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
index d430bb7..2992027 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackContext.java
@@ -61,7 +61,7 @@ public class StackContext {
   private LatestRepoQueryExecutor repoUpdateExecutor;
 
   private final static Logger LOG = LoggerFactory.getLogger(StackContext.class);
-  private static final int THREAD_COUNT = 5;
+  private static final int THREAD_COUNT = 10;
 
 
   /**
@@ -167,7 +167,7 @@ public class StackContext {
      */
     public void execute() {
 
-      long l = System.nanoTime();
+      long currentTime = System.nanoTime();
       List<Future<Map<StackModule, RepoUrlInfoResult>>> results = new ArrayList<>();
 
       // !!! first, load the *_urlinfo.json files and block for completion
@@ -177,7 +177,7 @@ public class StackContext {
         LOG.warn("Could not load urlinfo as the executor was interrupted", e);
         return;
       } finally {
-        LOG.info("Loaded urlinfo in " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - l) + "ms");
+        LOG.info("Loaded urlinfo in " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - currentTime) + "ms");
       }
 
       List<Map<StackModule, RepoUrlInfoResult>> urlInfoResults = new ArrayList<>();
@@ -190,6 +190,7 @@ public class StackContext {
         }
       }
 
+      currentTime = System.nanoTime();
       for (Map<StackModule, RepoUrlInfoResult> urlInfoResult : urlInfoResults) {
         for (Entry<StackModule, RepoUrlInfoResult> entry : urlInfoResult.entrySet()) {
           StackModule stackModule = entry.getKey();
@@ -212,6 +213,14 @@ public class StackContext {
       }
 
       executor.shutdown();
+
+      try {
+        executor.awaitTermination(2,  TimeUnit.MINUTES);
+      } catch (InterruptedException e) {
+        LOG.warn("Loading all VDF was interrupted", e.getCause());
+      } finally {
+        LOG.info("Loaded all VDF in " + TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - currentTime) + "ms");
+      }
     }
 
     /**