You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by rl...@apache.org on 2017/06/09 18:47:09 UTC
[1/9] ambari git commit: AMBARI-19369. Add Kerberos HTTP SPNEGO
authentication support to Hadoop/hbase/kafka/storm sinks (Qin Liu via rlevas)
Repository: ambari
Updated Branches:
refs/heads/branch-feature-AMBARI-20859 103dfffe7 -> ebe393900
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 5702b57..d2968f8 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -416,6 +416,16 @@ class TestDatanode(RMFTestCase):
group = 'root',
mode = 0644,
)
+ self.assertResourceCalled('File', conf_dir + '/hdfs_dn_jaas.conf',
+ content = Template('hdfs_dn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', conf_dir + '/hdfs_nn_jaas.conf',
+ content = Template('hdfs_nn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index 2202661..ff8f92e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -230,6 +230,16 @@ class TestJournalnode(RMFTestCase):
group = 'root',
mode = 0644,
)
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
+ content = Template('hdfs_dn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
+ content = Template('hdfs_nn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
owner = 'hdfs',
group = 'hadoop',
@@ -251,7 +261,6 @@ class TestJournalnode(RMFTestCase):
)
-
@patch('time.sleep')
def test_post_upgrade_restart(self, time_mock):
# load the NN and JN JMX files so that the urllib2.urlopen mock has data
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 01149fb..a7233c7 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -289,7 +289,7 @@ class TestNamenode(RMFTestCase):
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
- self.assert_configure_secured()
+ self.assert_configure_secured(False)
self.assertNoMoreResources()
@@ -302,7 +302,7 @@ class TestNamenode(RMFTestCase):
target = RMFTestCase.TARGET_COMMON_SERVICES,
call_mocks = [(0,"")],
)
- self.assert_configure_secured()
+ self.assert_configure_secured(False)
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
@@ -622,7 +622,7 @@ class TestNamenode(RMFTestCase):
stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
- self.assert_configure_secured()
+ self.assert_configure_secured(True)
self.assertResourceCalled('File', '/etc/hadoop/conf/dfs.exclude',
owner = 'hdfs',
content = Template('exclude_hosts_list.j2'),
@@ -1153,7 +1153,7 @@ class TestNamenode(RMFTestCase):
cd_access='a'
)
- def assert_configure_secured(self):
+ def assert_configure_secured(self, ha_enabled):
self.assertResourceCalled('Directory', '/usr/lib/hadoop/lib/native/Linux-i386-32',
create_parents = True,
)
@@ -1177,6 +1177,22 @@ class TestNamenode(RMFTestCase):
group = 'root',
mode = 0644,
)
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
+ content = Template('hdfs_dn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
+ content = Template('hdfs_nn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
+ if ha_enabled:
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_jn_jaas.conf',
+ content = Template('hdfs_jn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
index b8fee12..de425cd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
@@ -243,6 +243,16 @@ class TestNFSGateway(RMFTestCase):
group = 'root',
mode = 0644,
)
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
+ content = Template('hdfs_dn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
+ content = Template('hdfs_nn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
owner = 'hdfs',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
index 9e9366d..b3d7016 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -248,6 +248,16 @@ class TestSNamenode(RMFTestCase):
group = 'root',
mode = 0644,
)
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
+ content = Template('hdfs_dn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
+ content = Template('hdfs_nn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
owner = 'hdfs',
group = 'hadoop',
@@ -274,4 +284,4 @@ class TestSNamenode(RMFTestCase):
mode = 0755,
create_parents = True,
cd_access='a'
- )
\ No newline at end of file
+ )
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
index 127a045..0fe200c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -154,6 +154,21 @@ class TestZkfc(RMFTestCase):
group = 'root',
mode = 0644,
)
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_dn_jaas.conf',
+ content = Template('hdfs_dn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_nn_jaas.conf',
+ content = Template('hdfs_nn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hdfs_jn_jaas.conf',
+ content = Template('hdfs_jn_jaas.conf.j2'),
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'hdfs-site.xml',
owner = 'hdfs',
group = 'hadoop',
@@ -381,4 +396,4 @@ class TestZkfc(RMFTestCase):
environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
not_if = "ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E test -f /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid && ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E pgrep -F /var/run/hadoop/hdfs/hadoop-hdfs-zkfc.pid",
)
- self.assertNoMoreResources()
\ No newline at end of file
+ self.assertNoMoreResources()
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index 545f0e6..53d16fd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -712,6 +712,16 @@ class TestHistoryServer(RMFTestCase):
owner = 'yarn',
group = 'hadoop',
)
+ self.assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf',
+ content = Template('yarn_nm_jaas.conf.j2'),
+ owner = 'yarn',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/mapred_jaas.conf',
+ content = Template('mapred_jaas.conf.j2'),
+ owner = 'mapred',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
owner = 'mapred',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index b05d9f2..7e06969 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -346,6 +346,16 @@ class TestMapReduce2Client(RMFTestCase):
owner = 'yarn',
group = 'hadoop',
)
+ self.assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf',
+ content = Template('yarn_nm_jaas.conf.j2'),
+ owner = 'yarn',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/mapred_jaas.conf',
+ content = Template('mapred_jaas.conf.j2'),
+ owner = 'mapred',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
owner = 'mapred',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index 6fc5bae..dfbdd23 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -530,6 +530,16 @@ class TestNodeManager(RMFTestCase):
owner = 'yarn',
group = 'hadoop',
)
+ self.assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf',
+ content = Template('yarn_nm_jaas.conf.j2'),
+ owner = 'yarn',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/mapred_jaas.conf',
+ content = Template('mapred_jaas.conf.j2'),
+ owner = 'mapred',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
owner = 'mapred',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index ed5ee2c..5ccd6a4 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -498,6 +498,16 @@ class TestResourceManager(RMFTestCase):
owner = 'yarn',
group = 'hadoop',
)
+ self.assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf',
+ content = Template('yarn_nm_jaas.conf.j2'),
+ owner = 'yarn',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/mapred_jaas.conf',
+ content = Template('mapred_jaas.conf.j2'),
+ owner = 'mapred',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
owner = 'mapred',
group = 'hadoop',
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
index 8873fbf..f71c93a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
@@ -346,6 +346,16 @@ class TestYarnClient(RMFTestCase):
owner = 'yarn',
group = 'hadoop',
)
+ self.assertResourceCalled('File', '/etc/hadoop/conf/yarn_nm_jaas.conf',
+ content = Template('yarn_nm_jaas.conf.j2'),
+ owner = 'yarn',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/mapred_jaas.conf',
+ content = Template('mapred_jaas.conf.j2'),
+ owner = 'mapred',
+ group = 'hadoop',
+ )
self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
owner = 'mapred',
group = 'hadoop',
[6/9] ambari git commit: AMBARI-21209. Incorrect ambari DDL script
for Oracle DB (dlysnichenko)
Posted by rl...@apache.org.
AMBARI-21209. Incorrect ambari DDL script for Oracle DB (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/190ecad0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/190ecad0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/190ecad0
Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 190ecad0063340d163d220baddbb6b40b6d7ab83
Parents: 215bd7a
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Fri Jun 9 12:54:05 2017 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Fri Jun 9 12:54:05 2017 +0300
----------------------------------------------------------------------
.../src/main/resources/Ambari-DDL-Oracle-CREATE.sql | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/190ecad0/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
index 903fc9e..8f9406d 100644
--- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
+++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
@@ -76,7 +76,7 @@ CREATE TABLE clusterconfig (
config_data CLOB NOT NULL,
config_attributes CLOB,
create_timestamp NUMBER(19) NOT NULL,
- service_deleted SMALLINT NOT NULL DEFAULT 0,
+ service_deleted SMALLINT DEFAULT 0 NOT NULL,
selected_timestamp NUMBER(19) DEFAULT 0 NOT NULL,
CONSTRAINT PK_clusterconfig PRIMARY KEY (config_id),
CONSTRAINT FK_clusterconfig_cluster_id FOREIGN KEY (cluster_id) REFERENCES clusters (cluster_id),
@@ -324,8 +324,8 @@ CREATE TABLE request (
request_context VARCHAR(255),
request_type VARCHAR(255),
start_time NUMBER(19) NOT NULL,
- status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
- display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+ status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
+ display_status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
cluster_host_info BLOB NOT NULL,
CONSTRAINT PK_request PRIMARY KEY (request_id),
CONSTRAINT FK_request_schedule_id FOREIGN KEY (request_schedule_id) REFERENCES requestschedule (schedule_id));
@@ -341,8 +341,8 @@ CREATE TABLE stage (
command_params BLOB,
host_params BLOB,
command_execution_type VARCHAR2(32) DEFAULT 'STAGE' NOT NULL,
- status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
- display_status VARCHAR(255) NOT NULL DEFAULT 'PENDING',
+ status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
+ display_status VARCHAR(255) DEFAULT 'PENDING' NOT NULL,
CONSTRAINT PK_stage PRIMARY KEY (stage_id, request_id),
CONSTRAINT FK_stage_request_id FOREIGN KEY (request_id) REFERENCES request (request_id));
@@ -361,7 +361,7 @@ CREATE TABLE host_role_command (
start_time NUMBER(19) NOT NULL,
original_start_time NUMBER(19) NOT NULL,
end_time NUMBER(19),
- status VARCHAR2(255) NOT NULL DEFAULT 'PENDING',
+ status VARCHAR2(255) DEFAULT 'PENDING' NOT NULL,
auto_skip_on_failure NUMBER(1) DEFAULT 0 NOT NULL,
std_error BLOB NULL,
std_out BLOB NULL,
[4/9] ambari git commit: AMBARI-21205 Make ToggleKerberos and
AddDeleteService experimental features (Duc Le via rzang)
Posted by rl...@apache.org.
AMBARI-21205 Make ToggleKerberos and AddDeleteService experimental features (Duc Le via rzang)
Change-Id: I578ddcebbad34eefd40abef4b6524fc69b4cb8fc
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/57bb1365
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/57bb1365
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/57bb1365
Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 57bb1365e414c1f110d2d142fa198fb8e043af95
Parents: eb7fbbd
Author: Richard Zang <rz...@apache.org>
Authored: Thu Jun 8 15:38:38 2017 -0700
Committer: Richard Zang <rz...@apache.org>
Committed: Thu Jun 8 16:16:02 2017 -0700
----------------------------------------------------------------------
ambari-web/app/config.js | 6 ++--
ambari-web/app/routes/add_service_routes.js | 2 +-
ambari-web/app/routes/main.js | 2 +-
.../app/templates/main/admin/kerberos.hbs | 34 +++++++++++---------
.../main/service/all_services_actions.hbs | 6 ++--
ambari-web/app/views/main/admin.js | 14 ++++----
.../main/admin/stack_upgrade/services_view.js | 2 +-
ambari-web/app/views/main/menu.js | 16 +++++----
ambari-web/app/views/main/service/item.js | 2 +-
.../admin/stack_upgrade/services_view_test.js | 1 +
10 files changed, 49 insertions(+), 36 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/config.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/config.js b/ambari-web/app/config.js
index e7190be..b87b36f 100644
--- a/ambari-web/app/config.js
+++ b/ambari-web/app/config.js
@@ -87,9 +87,11 @@ App.supports = {
addingNewRepository: false,
kerberosStackAdvisor: true,
logCountVizualization: false,
- manageJournalNode: true,
createAlerts: false,
- enabledWizardForHostOrderedUpgrade: true
+ enabledWizardForHostOrderedUpgrade: true,
+ manageJournalNode: true,
+ enableToggleKerberos: true,
+ enableAddDeleteServices: true
};
if (App.enableExperimental) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/routes/add_service_routes.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/add_service_routes.js b/ambari-web/app/routes/add_service_routes.js
index 1615f0d..75b3586 100644
--- a/ambari-web/app/routes/add_service_routes.js
+++ b/ambari-web/app/routes/add_service_routes.js
@@ -24,7 +24,7 @@ module.exports = App.WizardRoute.extend({
route: '/service/add',
enter: function (router) {
- if (App.isAuthorized('SERVICE.ADD_DELETE_SERVICES')) {
+ if (App.isAuthorized('SERVICE.ADD_DELETE_SERVICES') && App.supports.enableAddDeleteServices) {
// `getSecurityStatus` call is required to retrieve information related to kerberos type: Manual or automated kerberos
router.get('mainController').isLoading.call(router.get('clusterController'),'isClusterNameLoaded').done(function () {
App.router.get('mainAdminKerberosController').getSecurityStatus().always(function () {
http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/routes/main.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/main.js b/ambari-web/app/routes/main.js
index 30cc8aa..7ed18de 100644
--- a/ambari-web/app/routes/main.js
+++ b/ambari-web/app/routes/main.js
@@ -460,7 +460,7 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
route: '/kerberos',
enter: function (router, transition) {
- if (router.get('loggedIn') && !App.isAuthorized('CLUSTER.TOGGLE_KERBEROS')) {
+ if (router.get('loggedIn') && (!App.isAuthorized('CLUSTER.TOGGLE_KERBEROS') || !App.supports.enableToggleKerberos)) {
router.transitionTo('main.dashboard.index');
}
},
http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/templates/main/admin/kerberos.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/admin/kerberos.hbs b/ambari-web/app/templates/main/admin/kerberos.hbs
index e7bb618..2b41122 100644
--- a/ambari-web/app/templates/main/admin/kerberos.hbs
+++ b/ambari-web/app/templates/main/admin/kerberos.hbs
@@ -20,20 +20,22 @@
<div>
<p class="text-success">{{t admin.security.enabled}}
{{#isAuthorized "CLUSTER.TOGGLE_KERBEROS"}}
- <button class="btn btn-padding btn-warning admin-disable-security-btn" {{bindAttr disabled="isKerberosButtonsDisabled"}} {{action notifySecurityOffPopup target="controller"}}>{{t admin.kerberos.button.disable}} </button>
- {{#unless isManualKerberos}}
- <button class="btn btn-success" id="regenerate-keytabs" {{bindAttr disabled="isKerberosButtonsDisabled"}} {{action regenerateKeytabs target="controller"}}>
- <i class="glyphicon glyphicon-repeat"></i> {{t admin.kerberos.button.regenerateKeytabs}}</button>
- {{#if App.isCredentialStorePersistent}}
- <button class="btn btn-primary" {{action showManageKDCCredentialsPopup target="controller"}}>{{t admin.kerberos.credentials.store.menu.label}}</button>
+ {{#if App.supports.enableToggleKerberos}}
+ <button class="btn btn-padding btn-warning admin-disable-security-btn" {{bindAttr disabled="isKerberosButtonsDisabled"}} {{action notifySecurityOffPopup target="controller"}}>{{t admin.kerberos.button.disable}} </button>
+ {{#unless isManualKerberos}}
+ <button class="btn btn-success" id="regenerate-keytabs" {{bindAttr disabled="isKerberosButtonsDisabled"}} {{action regenerateKeytabs target="controller"}}>
+ <i class="glyphicon glyphicon-repeat"></i> {{t admin.kerberos.button.regenerateKeytabs}}</button>
+ {{#if App.isCredentialStorePersistent}}
+ <button class="btn btn-primary" {{action showManageKDCCredentialsPopup target="controller"}}>{{t admin.kerberos.credentials.store.menu.label}}</button>
+ {{/if}}
+ {{/unless}}
+ <br/>
+ {{#unless isEditMode}}
+ <a href="#" {{action makeConfigsEditable target="controller"}} class="pull-right">
+ {{t common.edit}}
+ </a>
+ {{/unless}}
{{/if}}
- {{/unless}}
- <br/>
- {{#unless isEditMode}}
- <a href="#" {{action makeConfigsEditable target="controller"}} class="pull-right">
- {{t common.edit}}
- </a>
- {{/unless}}
{{/isAuthorized}}
</p>
</div>
@@ -51,8 +53,10 @@
<div>
<p class="muted background-text">{{t admin.security.disabled}}
{{#isAuthorized "CLUSTER.TOGGLE_KERBEROS"}}
- <a class="btn btn-padding btn-success admin-enable-security-btn" {{action checkAndStartKerberosWizard target="controller"}}>{{t admin.kerberos.button.enable}} </a>
- <br/>
+ {{#if App.supports.enableToggleKerberos}}
+ <a class="btn btn-padding btn-success admin-enable-security-btn" {{action checkAndStartKerberosWizard target="controller"}}>{{t admin.kerberos.button.enable}} </a>
+ <br/>
+ {{/if}}
{{/isAuthorized}}
</p>
</div>
http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/templates/main/service/all_services_actions.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/main/service/all_services_actions.hbs b/ambari-web/app/templates/main/service/all_services_actions.hbs
index 3e87cb2..a9e122b 100644
--- a/ambari-web/app/templates/main/service/all_services_actions.hbs
+++ b/ambari-web/app/templates/main/service/all_services_actions.hbs
@@ -22,12 +22,14 @@
</div>
<ul class="dropdown-menu">
{{#isAuthorized "SERVICE.ADD_DELETE_SERVICES"}}
- <li {{bindAttr class="view.serviceController.isAllServicesInstalled:disabled"}}>
+ {{#if App.supports.enableAddDeleteServices}}
+ <li {{bindAttr class="view.serviceController.isAllServicesInstalled:disabled"}}>
<a href="#"
{{bindAttr class="view.serviceController.isAllServicesInstalled:disabled"}}
{{action gotoAddService target="view.serviceController"}}>
<i class="glyphicon glyphicon-plus"></i> {{t services.service.add}}</a>
- </li>
+ </li>
+ {{/if}}
{{/isAuthorized}}
{{#isAuthorized "SERVICE.START_STOP"}}
<li class="divider"></li>
http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/views/main/admin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin.js b/ambari-web/app/views/main/admin.js
index 509f380..05d0f56 100644
--- a/ambari-web/app/views/main/admin.js
+++ b/ambari-web/app/views/main/admin.js
@@ -39,12 +39,14 @@ App.MainAdminView = Em.View.extend({
});
}
if (!App.get('isHadoopWindowsStack') && App.isAuthorized('CLUSTER.TOGGLE_KERBEROS') || (App.get('upgradeInProgress') || App.get('upgradeHolding')) ) {
- items.push({
- name: 'kerberos',
- url: 'adminKerberos.index',
- label: Em.I18n.t('common.kerberos'),
- disabled: App.get('upgradeInProgress') || App.get('upgradeHolding')
- });
+ if (App.supports.enableToggleKerberos) {
+ items.push({
+ name: 'kerberos',
+ url: 'adminKerberos.index',
+ label: Em.I18n.t('common.kerberos'),
+ disabled: App.get('upgradeInProgress') || App.get('upgradeHolding')
+ });
+ }
}
if ((App.isAuthorized('SERVICE.START_STOP, CLUSTER.MODIFY_CONFIGS') && App.isAuthorized('SERVICE.MANAGE_AUTO_START, CLUSTER.MANAGE_AUTO_START')) || (App.get('upgradeInProgress') || App.get('upgradeHolding'))) {
if (App.supports.serviceAutoStart) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/views/main/admin/stack_upgrade/services_view.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin/stack_upgrade/services_view.js b/ambari-web/app/views/main/admin/stack_upgrade/services_view.js
index f566814..25efffe 100644
--- a/ambari-web/app/views/main/admin/stack_upgrade/services_view.js
+++ b/ambari-web/app/views/main/admin/stack_upgrade/services_view.js
@@ -56,7 +56,7 @@ App.MainAdminStackServicesView = Em.View.extend({
* @param event
*/
goToAddService: function (event) {
- if (!App.isAuthorized('SERVICE.ADD_DELETE_SERVICES')) {
+ if (!App.isAuthorized('SERVICE.ADD_DELETE_SERVICES') || !App.supports.enableAddDeleteServices) {
return;
} else if (event.context == "KERBEROS") {
App.router.get('mainAdminKerberosController').checkAndStartKerberosWizard();
http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/views/main/menu.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/menu.js b/ambari-web/app/views/main/menu.js
index 4bb53ae..32c4f6f 100644
--- a/ambari-web/app/views/main/menu.js
+++ b/ambari-web/app/views/main/menu.js
@@ -118,13 +118,15 @@ App.MainSideMenuView = Em.CollectionView.extend({
});
}
if (!App.get('isHadoopWindowsStack') && App.isAuthorized('CLUSTER.TOGGLE_KERBEROS') || upg) {
- categories.push({
- name: 'kerberos',
- url: 'kerberos/',
- label: Em.I18n.t('common.kerberos'),
- disabled: App.get('upgradeInProgress') || App.get('upgradeHolding'),
- href: router.urlFor('main.admin.adminKerberos')
- });
+ if (App.supports.enableToggleKerberos) {
+ categories.push({
+ name: 'kerberos',
+ url: 'kerberos/',
+ label: Em.I18n.t('common.kerberos'),
+ disabled: App.get('upgradeInProgress') || App.get('upgradeHolding'),
+ href: router.urlFor('main.admin.adminKerberos')
+ });
+ }
}
if ((App.isAuthorized('SERVICE.START_STOP, CLUSTER.MODIFY_CONFIGS') && App.isAuthorized('SERVICE.MANAGE_AUTO_START, CLUSTER.MANAGE_AUTO_START')) || upg) {
if (App.supports.serviceAutoStart) {
http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/app/views/main/service/item.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/service/item.js b/ambari-web/app/views/main/service/item.js
index e25ade1..43d75e6 100644
--- a/ambari-web/app/views/main/service/item.js
+++ b/ambari-web/app/views/main/service/item.js
@@ -289,7 +289,7 @@ App.MainServiceItemView = Em.View.extend({
options.push(actionMap.DOWNLOAD_CLIENT_CONFIGS);
}
- if (App.isAuthorized("SERVICE.ADD_DELETE_SERVICES")) {
+ if (App.isAuthorized("SERVICE.ADD_DELETE_SERVICES") && App.supports.enableAddDeleteServices) {
options.push(actionMap.DELETE_SERVICE);
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/57bb1365/ambari-web/test/views/main/admin/stack_upgrade/services_view_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/views/main/admin/stack_upgrade/services_view_test.js b/ambari-web/test/views/main/admin/stack_upgrade/services_view_test.js
index 70d182c..da75cf2 100644
--- a/ambari-web/test/views/main/admin/stack_upgrade/services_view_test.js
+++ b/ambari-web/test/views/main/admin/stack_upgrade/services_view_test.js
@@ -34,6 +34,7 @@ describe('App.MainAdminStackServicesView', function () {
sinon.stub(App.router, 'get').returns(mock);
sinon.spy(mock, 'checkAndStartKerberosWizard');
isAccessibleMock = sinon.stub(App, 'isAuthorized');
+ App.set('supports.enableAddDeleteServices', true);
});
afterEach(function() {
App.get('router').transitionTo.restore();
[8/9] ambari git commit: AMBARI-21128 Add AMS HA support to local
metrics aggregator application (dsen)
Posted by rl...@apache.org.
AMBARI-21128 Add AMS HA support to local metrics aggregator application (dsen)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/29f75089
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/29f75089
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/29f75089
Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 29f750894754ed2112fcedaa0b2f5ec693b5cd0e
Parents: 190ecad
Author: Dmytro Sen <ds...@apache.org>
Authored: Fri Jun 9 14:36:11 2017 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Fri Jun 9 14:36:11 2017 +0300
----------------------------------------------------------------------
.../timeline/AbstractTimelineMetricsSink.java | 4 +-
.../ambari-metrics-host-aggregator/pom.xml | 30 +++-
.../AbstractMetricPublisherThread.java | 134 ---------------
.../aggregator/AggregatedMetricsPublisher.java | 101 -----------
.../host/aggregator/AggregatorApplication.java | 98 +++++++----
.../host/aggregator/AggregatorWebService.java | 2 +-
.../host/aggregator/RawMetricsPublisher.java | 60 -------
.../host/aggregator/TimelineMetricsHolder.java | 26 ++-
.../sink/timeline/AbstractMetricPublisher.java | 169 +++++++++++++++++++
.../timeline/AggregatedMetricsPublisher.java | 103 +++++++++++
.../sink/timeline/RawMetricsPublisher.java | 65 +++++++
.../aggregator/AggregatorApplicationTest.java | 55 ++++++
.../aggregator/AggregatorWebServiceTest.java | 135 +++++++++++++++
.../aggregator/TimelineMetricsHolderTest.java | 107 ++++++++++++
.../timeline/AbstractMetricPublisherTest.java | 82 +++++++++
.../AggregatedMetricsPublisherTest.java | 154 +++++++++++++++++
.../sink/timeline/RawMetricsPublisherTest.java | 151 +++++++++++++++++
.../src/main/python/core/aggregator.py | 6 +-
.../src/main/python/core/controller.py | 2 +-
19 files changed, 1133 insertions(+), 351 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
index fddf4b3..644d978 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -88,7 +88,7 @@ public abstract class AbstractTimelineMetricsSink {
private static final String WWW_AUTHENTICATE = "WWW-Authenticate";
private static final String NEGOTIATE = "Negotiate";
- protected static final AtomicInteger failedCollectorConnectionsCounter = new AtomicInteger(0);
+ protected final AtomicInteger failedCollectorConnectionsCounter = new AtomicInteger(0);
public static int NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS = 100;
protected static final AtomicInteger nullCollectorCounter = new AtomicInteger(0);
public static int NUMBER_OF_NULL_COLLECTOR_EXCEPTIONS = 20;
@@ -120,7 +120,7 @@ public abstract class AbstractTimelineMetricsSink {
private volatile boolean isInitializedForHA = false;
@SuppressWarnings("all")
- private final int RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER = 5;
+ private final int RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER = 3;
private final Gson gson = new Gson();
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/pom.xml b/ambari-metrics/ambari-metrics-host-aggregator/pom.xml
index 0598bef..24432dd 100644
--- a/ambari-metrics/ambari-metrics-host-aggregator/pom.xml
+++ b/ambari-metrics/ambari-metrics-host-aggregator/pom.xml
@@ -38,12 +38,6 @@
<dependencies>
<dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>14.0.1</version>
@@ -83,6 +77,30 @@
<artifactId>hadoop-common</artifactId>
<version>2.7.1.2.3.4.0-3347</version>
</dependency>
+ <dependency>
+ <groupId>com.sun.jersey.jersey-test-framework</groupId>
+ <artifactId>jersey-test-framework-core</artifactId>
+ <version>1.11</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>com.sun.jersey.jersey-test-framework</groupId>
+ <artifactId>jersey-test-framework-grizzly2</artifactId>
+ <version>1.11</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.easymock</groupId>
+ <artifactId>easymock</artifactId>
+ <version>3.4</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.2</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AbstractMetricPublisherThread.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AbstractMetricPublisherThread.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AbstractMetricPublisherThread.java
deleted file mode 100644
index b1f60fa..0000000
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AbstractMetricPublisherThread.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.host.aggregator;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-import org.codehaus.jackson.map.AnnotationIntrospector;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-import org.codehaus.jackson.xc.JaxbAnnotationIntrospector;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.Map;
-
-/**
- * Abstract class that runs a thread that publishes metrics data to AMS collector in specified intervals.
- */
-public abstract class AbstractMetricPublisherThread extends Thread {
- protected int publishIntervalInSeconds;
- protected String publishURL;
- protected ObjectMapper objectMapper;
- private Log LOG;
- protected TimelineMetricsHolder timelineMetricsHolder;
-
- public AbstractMetricPublisherThread(TimelineMetricsHolder timelineMetricsHolder, String publishURL, int publishIntervalInSeconds) {
- LOG = LogFactory.getLog(this.getClass());
- this.publishURL = publishURL;
- this.publishIntervalInSeconds = publishIntervalInSeconds;
- this.timelineMetricsHolder = timelineMetricsHolder;
- objectMapper = new ObjectMapper();
- AnnotationIntrospector introspector = new JaxbAnnotationIntrospector();
- objectMapper.setAnnotationIntrospector(introspector);
- objectMapper.getSerializationConfig()
- .withSerializationInclusion(JsonSerialize.Inclusion.NON_NULL);
- }
-
- /**
- * Publishes metrics to collector in specified intervals while not interrupted.
- */
- @Override
- public void run() {
- while (!isInterrupted()) {
- try {
- sleep(this.publishIntervalInSeconds * 1000);
- } catch (InterruptedException e) {
- //Ignore
- }
- try {
- processAndPublishMetrics(getMetricsFromCache());
- } catch (Exception e) {
- LOG.error("Couldn't process and send metrics : ",e);
- }
- }
- }
-
- /**
- * Processes and sends metrics to collector.
- * @param metricsFromCache
- * @throws Exception
- */
- protected void processAndPublishMetrics(Map<Long, TimelineMetrics> metricsFromCache) throws Exception {
- if (metricsFromCache.size()==0) return;
-
- LOG.info(String.format("Preparing %s timeline metrics for publishing", metricsFromCache.size()));
- publishMetricsJson(processMetrics(metricsFromCache));
- }
-
- /**
- * Returns metrics map. Source is based on implementation.
- * @return
- */
- protected abstract Map<Long,TimelineMetrics> getMetricsFromCache();
-
- /**
- * Processes given metrics (aggregates or merges them) and converts them into json string that will be send to collector
- * @param metricValues
- * @return
- */
- protected abstract String processMetrics(Map<Long, TimelineMetrics> metricValues);
-
- protected void publishMetricsJson(String jsonData) throws Exception {
- int timeout = 5 * 1000;
- HttpURLConnection connection = null;
- if (this.publishURL == null) {
- throw new IOException("Unknown URL. Unable to connect to metrics collector.");
- }
- LOG.info("Collector URL : " + publishURL);
- connection = (HttpURLConnection) new URL(this.publishURL).openConnection();
-
- connection.setRequestMethod("POST");
- connection.setRequestProperty("Content-Type", "application/json");
- connection.setRequestProperty("Connection", "Keep-Alive");
- connection.setConnectTimeout(timeout);
- connection.setReadTimeout(timeout);
- connection.setDoOutput(true);
-
- if (jsonData != null) {
- try (OutputStream os = connection.getOutputStream()) {
- os.write(jsonData.getBytes("UTF-8"));
- }
- }
- int responseCode = connection.getResponseCode();
- if (responseCode != 200) {
- throw new Exception("responseCode is " + responseCode);
- }
- LOG.info("Successfully sent metrics.");
- }
-
- /**
- * Interrupts the thread.
- */
- protected void stopPublisher() {
- this.interrupt();
- }
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatedMetricsPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatedMetricsPublisher.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatedMetricsPublisher.java
deleted file mode 100644
index 0540ec9..0000000
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatedMetricsPublisher.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.host.aggregator;
-
-
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.AggregationResult;
-import org.apache.hadoop.metrics2.sink.timeline.MetricHostAggregate;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetricWithAggregatedValues;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-
-/**
- * Thread that aggregates and publishes metrics to collector on specified interval.
- */
-public class AggregatedMetricsPublisher extends AbstractMetricPublisherThread {
-
- private Log LOG;
-
- public AggregatedMetricsPublisher(TimelineMetricsHolder timelineMetricsHolder, String collectorURL, int interval) {
- super(timelineMetricsHolder, collectorURL, interval);
- LOG = LogFactory.getLog(this.getClass());
- }
-
- /**
- * get metrics map form @TimelineMetricsHolder
- * @return
- */
- @Override
- protected Map<Long, TimelineMetrics> getMetricsFromCache() {
- return timelineMetricsHolder.extractMetricsForAggregationPublishing();
- }
-
- /**
- * Aggregates given metrics and converts them into json string that will be send to collector
- * @param metricForAggregationValues
- * @return
- */
- @Override
- protected String processMetrics(Map<Long, TimelineMetrics> metricForAggregationValues) {
- HashMap<String, TimelineMetrics> nameToMetricMap = new HashMap<>();
- for (TimelineMetrics timelineMetrics : metricForAggregationValues.values()) {
- for (TimelineMetric timelineMetric : timelineMetrics.getMetrics()) {
- if (!nameToMetricMap.containsKey(timelineMetric.getMetricName())) {
- nameToMetricMap.put(timelineMetric.getMetricName(), new TimelineMetrics());
- }
- nameToMetricMap.get(timelineMetric.getMetricName()).addOrMergeTimelineMetric(timelineMetric);
- }
- }
- Set<TimelineMetricWithAggregatedValues> metricAggregateMap = new HashSet<>();
- for (TimelineMetrics metrics : nameToMetricMap.values()) {
- double sum = 0;
- double max = Integer.MIN_VALUE;
- double min = Integer.MAX_VALUE;
- int count = 0;
- for (TimelineMetric metric : metrics.getMetrics()) {
- for (Double value : metric.getMetricValues().values()) {
- sum+=value;
- max = Math.max(max, value);
- min = Math.min(min, value);
- count++;
- }
- }
- TimelineMetric tmpMetric = new TimelineMetric(metrics.getMetrics().get(0));
- tmpMetric.setMetricValues(new TreeMap<Long, Double>());
- metricAggregateMap.add(new TimelineMetricWithAggregatedValues(tmpMetric, new MetricHostAggregate(sum, count, 0d, max, min)));
- }
- String json = null;
- try {
- json = objectMapper.writeValueAsString(new AggregationResult(metricAggregateMap, System.currentTimeMillis()));
- LOG.debug(json);
- } catch (Exception e) {
- LOG.error("Failed to convert result into json", e);
- }
-
- return json;
- }
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplication.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplication.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplication.java
index c6b703b..1e5cc82 100644
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplication.java
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplication.java
@@ -33,6 +33,9 @@ import java.util.HashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.sink.timeline.AbstractMetricPublisher;
+import org.apache.hadoop.metrics2.sink.timeline.AggregatedMetricsPublisher;
+import org.apache.hadoop.metrics2.sink.timeline.RawMetricsPublisher;
/**
* WEB application with 2 publisher threads that processes received metrics and submits results to the collector
@@ -40,24 +43,25 @@ import org.apache.hadoop.conf.Configuration;
public class AggregatorApplication
{
private static final int STOP_SECONDS_DELAY = 0;
- private static final int JOIN_SECONDS_TIMEOUT = 2;
- private static String BASE_POST_URL = "%s://%s:%s/ws/v1/timeline/metrics";
- private static String AGGREGATED_POST_PREFIX = "/aggregated";
+ private static final int JOIN_SECONDS_TIMEOUT = 5;
private static final String METRICS_SITE_CONFIGURATION_FILE = "ams-site.xml";
- private static Log LOG = LogFactory.getLog("AggregatorApplication.class");
+ private Log LOG;
private final int webApplicationPort;
private final int rawPublishingInterval;
private final int aggregationInterval;
private Configuration configuration;
- private String [] collectorHosts;
- private AggregatedMetricsPublisher aggregatePublisher;
- private RawMetricsPublisher rawPublisher;
+ private Thread aggregatePublisherThread;
+ private Thread rawPublisherThread;
private TimelineMetricsHolder timelineMetricsHolder;
private HttpServer httpServer;
- public AggregatorApplication(String collectorHosts) {
+ public AggregatorApplication(String hostname, String collectorHosts) {
+ LOG = LogFactory.getLog(this.getClass());
+ configuration = new Configuration(true);
initConfiguration();
- this.collectorHosts = collectorHosts.split(",");
+ configuration.set("timeline.metrics.collector.hosts", collectorHosts);
+ configuration.set("timeline.metrics.hostname", hostname);
+ configuration.set("timeline.metrics.zk.quorum", getZkQuorumFromConfiguration());
this.aggregationInterval = configuration.getInt("timeline.metrics.host.aggregator.minute.interval", 300);
this.rawPublishingInterval = configuration.getInt("timeline.metrics.sink.report.interval", 60);
this.webApplicationPort = configuration.getInt("timeline.metrics.host.inmemory.aggregation.port", 61888);
@@ -70,7 +74,13 @@ public class AggregatorApplication
}
}
- private void initConfiguration() {
+ private String getZkQuorumFromConfiguration() {
+ String zkClientPort = configuration.getTrimmed("cluster.zookeeper.property.clientPort", "2181");
+ String zkServerHosts = configuration.getTrimmed("cluster.zookeeper.quorum", "");
+ return getZkConnectionUrl(zkClientPort, zkServerHosts);
+ }
+
+ protected void initConfiguration() {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
if (classLoader == null) {
classLoader = getClass().getClassLoader();
@@ -82,7 +92,7 @@ public class AggregatorApplication
throw new IllegalStateException("Unable to initialize the metrics " +
"subsystem. No ams-site present in the classpath.");
}
- configuration = new Configuration(true);
+
try {
configuration.addResource(amsResUrl.toURI().toURL());
} catch (Exception e) {
@@ -91,7 +101,7 @@ public class AggregatorApplication
}
}
- private String getHostName() {
+ protected String getHostName() {
String hostName = "localhost";
try {
hostName = InetAddress.getLocalHost().getCanonicalHostName();
@@ -101,13 +111,13 @@ public class AggregatorApplication
return hostName;
}
- private URI getURI() {
+ protected URI getURI() {
URI uri = UriBuilder.fromUri("http://" + getHostName() + "/").port(this.webApplicationPort).build();
LOG.info(String.format("Web server at %s", uri));
return uri;
}
- private HttpServer createHttpServer() throws IOException {
+ protected HttpServer createHttpServer() throws IOException {
ResourceConfig resourceConfig = new PackagesResourceConfig("org.apache.hadoop.metrics2.host.aggregator");
HashMap<String, Object> params = new HashMap();
params.put("com.sun.jersey.api.json.POJOMappingFeature", "true");
@@ -122,29 +132,30 @@ public class AggregatorApplication
private void startAggregatePublisherThread() {
LOG.info("Starting aggregated metrics publisher.");
- String collectorURL = buildBasicCollectorURL(collectorHosts[0]) + AGGREGATED_POST_PREFIX;
- aggregatePublisher = new AggregatedMetricsPublisher(timelineMetricsHolder, collectorURL, aggregationInterval);
- aggregatePublisher.start();
+ AbstractMetricPublisher metricPublisher = new AggregatedMetricsPublisher(timelineMetricsHolder, configuration, aggregationInterval);
+ aggregatePublisherThread = new Thread(metricPublisher);
+ aggregatePublisherThread.start();
}
private void startRawPublisherThread() {
LOG.info("Starting raw metrics publisher.");
- String collectorURL = buildBasicCollectorURL(collectorHosts[0]);
- rawPublisher = new RawMetricsPublisher(timelineMetricsHolder, collectorURL, rawPublishingInterval);
- rawPublisher.start();
+ AbstractMetricPublisher metricPublisher = new RawMetricsPublisher(timelineMetricsHolder, configuration, rawPublishingInterval);
+ rawPublisherThread = aggregatePublisherThread = new Thread(metricPublisher);
+ aggregatePublisherThread.start();
}
private void stop() {
- aggregatePublisher.stopPublisher();
- rawPublisher.stopPublisher();
+ LOG.info("Stopping aggregator application");
+ aggregatePublisherThread.interrupt();
+ rawPublisherThread.interrupt();
httpServer.stop(STOP_SECONDS_DELAY);
LOG.info("Stopped web server.");
try {
LOG.info("Waiting for threads to join.");
- aggregatePublisher.join(JOIN_SECONDS_TIMEOUT * 1000);
- rawPublisher.join(JOIN_SECONDS_TIMEOUT * 1000);
+ aggregatePublisherThread.join(JOIN_SECONDS_TIMEOUT * 1000);
+ rawPublisherThread.join(JOIN_SECONDS_TIMEOUT * 1000);
LOG.info("Gracefully stopped Aggregator Application.");
} catch (InterruptedException e) {
LOG.error("Received exception during stop : ", e);
@@ -153,28 +164,43 @@ public class AggregatorApplication
}
- private String buildBasicCollectorURL(String host) {
- String port = configuration.get("timeline.metrics.service.webapp.address", "0.0.0.0:6188").split(":")[1];
- String protocol = configuration.get("timeline.metrics.service.http.policy", "HTTP_ONLY").equalsIgnoreCase("HTTP_ONLY") ? "http" : "https";
- return String.format(BASE_POST_URL, protocol, host, port);
+ private String getZkConnectionUrl(String zkClientPort, String zkQuorum) {
+ StringBuilder sb = new StringBuilder();
+ String[] quorumParts = zkQuorum.split(",");
+ String prefix = "";
+ for (String part : quorumParts) {
+ sb.append(prefix);
+ sb.append(part.trim());
+ if (!part.contains(":")) {
+ sb.append(":");
+ sb.append(zkClientPort);
+ }
+ prefix = ",";
+ }
+ return sb.toString();
}
public static void main( String[] args ) throws Exception {
- LOG.info("Starting aggregator application");
- if (args.length != 1) {
- throw new Exception("This jar should be run with 1 argument - collector hosts separated with coma");
+ if (args.length != 2) {
+ throw new Exception("This jar should be executed with 2 arguments : 1st - current host name, " +
+ "2nd - collector hosts separated with coma");
}
- final AggregatorApplication app = new AggregatorApplication(args[0]);
- app.startAggregatePublisherThread();
- app.startRawPublisherThread();
- app.startWebServer();
+ final AggregatorApplication app = new AggregatorApplication(args[0], args[1]);
+
+ app.startWebServerAndPublishersThreads();
Runtime.getRuntime().addShutdownHook(new Thread() {
public void run() {
- LOG.info("Stopping aggregator application");
app.stop();
}
});
}
+
+ private void startWebServerAndPublishersThreads() {
+ LOG.info("Starting aggregator application");
+ startAggregatePublisherThread();
+ startRawPublisherThread();
+ startWebServer();
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebService.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebService.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebService.java
index f96d0ed..b151209 100644
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebService.java
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebService.java
@@ -39,7 +39,7 @@ public class AggregatorWebService {
@GET
@Produces("text/json")
@Path("/metrics")
- public Response helloWorld() throws IOException {
+ public Response getOkResponse() throws IOException {
return Response.ok().build();
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/RawMetricsPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/RawMetricsPublisher.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/RawMetricsPublisher.java
deleted file mode 100644
index f317ed9..0000000
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/RawMetricsPublisher.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metrics2.host.aggregator;
-
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
-import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
-
-import java.util.Map;
-
-public class RawMetricsPublisher extends AbstractMetricPublisherThread {
- private final Log LOG;
-
- public RawMetricsPublisher(TimelineMetricsHolder timelineMetricsHolder, String collectorURL, int interval) {
- super(timelineMetricsHolder, collectorURL, interval);
- LOG = LogFactory.getLog(this.getClass());
- }
-
-
- @Override
- protected Map<Long, TimelineMetrics> getMetricsFromCache() {
- return timelineMetricsHolder.extractMetricsForRawPublishing();
- }
-
- @Override
- protected String processMetrics(Map<Long, TimelineMetrics> metricValues) {
- //merge everything in one TimelineMetrics object
- TimelineMetrics timelineMetrics = new TimelineMetrics();
- for (TimelineMetrics metrics : metricValues.values()) {
- for (TimelineMetric timelineMetric : metrics.getMetrics())
- timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
- }
- //map TimelineMetrics to json string
- String json = null;
- try {
- json = objectMapper.writeValueAsString(timelineMetrics);
- LOG.debug(json);
- } catch (Exception e) {
- LOG.error("Failed to convert result into json", e);
- }
- return json;
- }
-}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolder.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolder.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolder.java
index b355c97..03b6542 100644
--- a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolder.java
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolder.java
@@ -19,8 +19,10 @@ package org.apache.hadoop.metrics2.host.aggregator;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
@@ -33,8 +35,8 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
public class TimelineMetricsHolder {
private static final int DEFAULT_RAW_CACHE_EXPIRE_TIME = 60;
private static final int DEFAULT_AGGREGATION_CACHE_EXPIRE_TIME = 300;
- private Cache<Long, TimelineMetrics> aggregationMetricsCache;
- private Cache<Long, TimelineMetrics> rawMetricsCache;
+ private Cache<String, TimelineMetrics> aggregationMetricsCache;
+ private Cache<String, TimelineMetrics> rawMetricsCache;
private static TimelineMetricsHolder instance = null;
//to ensure no metric values are expired
private static int EXPIRE_DELAY = 30;
@@ -63,21 +65,29 @@ public class TimelineMetricsHolder {
public void putMetricsForAggregationPublishing(TimelineMetrics timelineMetrics) {
aggregationCacheLock.writeLock().lock();
- aggregationMetricsCache.put(System.currentTimeMillis(), timelineMetrics);
+ aggregationMetricsCache.put(calculateCacheKey(timelineMetrics), timelineMetrics);
aggregationCacheLock.writeLock().unlock();
}
- public Map<Long, TimelineMetrics> extractMetricsForAggregationPublishing() {
+ private String calculateCacheKey(TimelineMetrics timelineMetrics) {
+ List<TimelineMetric> metrics = timelineMetrics.getMetrics();
+ if (metrics.size() > 0) {
+ return metrics.get(0).getAppId() + System.currentTimeMillis();
+ }
+ return String.valueOf(System.currentTimeMillis());
+ }
+
+ public Map<String, TimelineMetrics> extractMetricsForAggregationPublishing() {
return extractMetricsFromCacheWithLock(aggregationMetricsCache, aggregationCacheLock);
}
public void putMetricsForRawPublishing(TimelineMetrics metrics) {
rawCacheLock.writeLock().lock();
- rawMetricsCache.put(System.currentTimeMillis(), metrics);
+ rawMetricsCache.put(calculateCacheKey(metrics), metrics);
rawCacheLock.writeLock().unlock();
}
- public Map<Long, TimelineMetrics> extractMetricsForRawPublishing() {
+ public Map<String, TimelineMetrics> extractMetricsForRawPublishing() {
return extractMetricsFromCacheWithLock(rawMetricsCache, rawCacheLock);
}
@@ -87,9 +97,9 @@ public class TimelineMetricsHolder {
* @param lock
* @return
*/
- private Map<Long, TimelineMetrics> extractMetricsFromCacheWithLock(Cache<Long, TimelineMetrics> cache, ReadWriteLock lock) {
+ private Map<String, TimelineMetrics> extractMetricsFromCacheWithLock(Cache<String, TimelineMetrics> cache, ReadWriteLock lock) {
lock.writeLock().lock();
- Map<Long, TimelineMetrics> metricsMap = new TreeMap<>(cache.asMap());
+ Map<String, TimelineMetrics> metricsMap = new TreeMap<>(cache.asMap());
cache.invalidateAll();
lock.writeLock().unlock();
return metricsMap;
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisher.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisher.java
new file mode 100644
index 0000000..5af115f
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisher.java
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+
+import java.util.Collection;
+import java.util.Map;
+
+/**
+ * Abstract class that runs a thread that publishes metrics data to AMS collector in specified intervals.
+ */
+public abstract class AbstractMetricPublisher extends AbstractTimelineMetricsSink implements Runnable {
+
+    // Truststore settings used only when the collector is HTTPS-only.
+    // NOTE(review): the TYPE and PASSWORD property values were swapped in the
+    // original patch (TYPE held "...password" and PASSWORD held "...type"),
+    // which made loadTruststore() receive the store type as the password and
+    // vice versa. Fixed here.
+    private static final String AMS_SITE_SSL_KEYSTORE_PATH_PROPERTY = "ssl.server.truststore.location";
+    private static final String AMS_SITE_SSL_KEYSTORE_TYPE_PROPERTY = "ssl.server.truststore.type";
+    private static final String AMS_SITE_SSL_KEYSTORE_PASSWORD_PROPERTY = "ssl.server.truststore.password";
+    private static final String AMS_SITE_HTTP_POLICY_PROPERTY = "timeline.metrics.service.http.policy";
+    private static final String AMS_SITE_COLLECTOR_WEBAPP_ADDRESS_PROPERTY = "timeline.metrics.service.webapp.address";
+    private static final String PUBLISHER_COLLECTOR_HOSTS_PROPERTY = "timeline.metrics.collector.hosts";
+    private static final String PUBLISHER_ZOOKEEPER_QUORUM_PROPERTY = "timeline.metrics.zk.quorum";
+    private static final String PUBLISHER_HOSTNAME_PROPERTY = "timeline.metrics.hostname";
+    // Format placeholders: protocol, host, port.
+    protected static String BASE_POST_URL = "%s://%s:%s/ws/v1/timeline/metrics";
+    protected int publishIntervalInSeconds;
+    private Log LOG;
+    protected TimelineMetricsHolder timelineMetricsHolder;
+    protected Configuration configuration;
+    private String collectorProtocol;
+    private String collectorPort;
+    private Collection<String> collectorHosts;
+    private String hostname;
+    private String zkQuorum;
+
+    public AbstractMetricPublisher(TimelineMetricsHolder timelineMetricsHolder, Configuration configuration, int publishIntervalInSeconds) {
+        LOG = LogFactory.getLog(this.getClass());
+        this.configuration = configuration;
+        this.publishIntervalInSeconds = publishIntervalInSeconds;
+        this.timelineMetricsHolder = timelineMetricsHolder;
+        configure();
+    }
+
+    /**
+     * Reads collector connection settings (protocol, port, hosts, ZK quorum,
+     * local hostname) from the configuration and, for HTTPS, loads the truststore.
+     */
+    protected void configure() {
+        collectorProtocol = configuration.get(AMS_SITE_HTTP_POLICY_PROPERTY, "HTTP_ONLY").equalsIgnoreCase("HTTP_ONLY") ? "http" : "https";
+        collectorPort = configuration.getTrimmed(AMS_SITE_COLLECTOR_WEBAPP_ADDRESS_PROPERTY, "0.0.0.0:6188").split(":")[1];
+        // Parse the hosts list once from the trimmed property value. The
+        // original patch parsed the same property a second time untrimmed,
+        // silently overwriting this result; the duplicate was removed.
+        collectorHosts = parseHostsStringIntoCollection(configuration.getTrimmed(PUBLISHER_COLLECTOR_HOSTS_PROPERTY, ""));
+        zkQuorum = configuration.get(PUBLISHER_ZOOKEEPER_QUORUM_PROPERTY, "");
+        hostname = configuration.get(PUBLISHER_HOSTNAME_PROPERTY, "localhost");
+        if (collectorHosts.isEmpty()) {
+            LOG.error("No Metric collector configured.");
+        } else {
+            if (collectorProtocol.contains("https")) {
+                String trustStorePath = configuration.get(AMS_SITE_SSL_KEYSTORE_PATH_PROPERTY).trim();
+                String trustStoreType = configuration.get(AMS_SITE_SSL_KEYSTORE_TYPE_PROPERTY).trim();
+                String trustStorePwd = configuration.get(AMS_SITE_SSL_KEYSTORE_PASSWORD_PROPERTY).trim();
+                loadTruststore(trustStorePath, trustStoreType, trustStorePwd);
+            }
+        }
+    }
+
+    /**
+     * Publishes metrics to collector in specified intervals while not interrupted.
+     */
+    @Override
+    public void run() {
+        while (!Thread.currentThread().isInterrupted()) {
+            try {
+                Thread.sleep(this.publishIntervalInSeconds * 1000);
+            } catch (InterruptedException e) {
+                // Thread.sleep() clears the interrupt status when it throws;
+                // restore it so the loop condition can observe the request.
+                // The original swallowed the interrupt, so the publisher
+                // thread could never be stopped via interruption.
+                Thread.currentThread().interrupt();
+            }
+            try {
+                processAndPublishMetrics(getMetricsFromCache());
+            } catch (Exception e) {
+                // Best-effort publishing: log the failure and keep the loop alive.
+                LOG.error("Failed to publish metrics", e);
+            }
+        }
+    }
+
+    /**
+     * Processes and sends metrics to collector.
+     * @param metricsFromCache metrics keyed by cache key, as extracted from the holder
+     * @throws Exception on conversion or transmission failure
+     */
+    protected void processAndPublishMetrics(Map<String, TimelineMetrics> metricsFromCache) throws Exception {
+        if (metricsFromCache.isEmpty()) {
+            return;
+        }
+
+        LOG.info(String.format("Preparing %s timeline metrics for publishing", metricsFromCache.size()));
+        emitMetricsJson(getCollectorUri(getCurrentCollectorHost()), processMetrics(metricsFromCache));
+    }
+
+    /**
+     * Returns metrics map. Source is based on implementation.
+     * @return metrics to publish, keyed by cache key
+     */
+    protected abstract Map<String, TimelineMetrics> getMetricsFromCache();
+
+    /**
+     * Processes given metrics (aggregates or merges them) and converts them into json string that will be send to collector
+     * @param metricValues metrics keyed by cache key
+     * @return JSON payload for the collector
+     */
+    protected abstract String processMetrics(Map<String, TimelineMetrics> metricValues);
+
+    /** Returns the POST URL format string consumed by {@link #getCollectorUri(String)}. */
+    protected abstract String getPostUrl();
+
+    @Override
+    protected String getCollectorUri(String host) {
+        return String.format(getPostUrl(), getCollectorProtocol(), host, getCollectorPort());
+    }
+
+    @Override
+    protected String getCollectorProtocol() {
+        return collectorProtocol;
+    }
+
+    @Override
+    protected String getCollectorPort() {
+        return collectorPort;
+    }
+
+    @Override
+    protected int getTimeoutSeconds() {
+        return DEFAULT_POST_TIMEOUT_SECONDS;
+    }
+
+    @Override
+    protected String getZookeeperQuorum() {
+        return zkQuorum;
+    }
+
+    @Override
+    protected Collection<String> getConfiguredCollectorHosts() {
+        return collectorHosts;
+    }
+
+    @Override
+    protected String getHostname() {
+        return hostname;
+    }
+
+    @Override
+    protected boolean isHostInMemoryAggregationEnabled() {
+        return false;
+    }
+
+    @Override
+    protected int getHostInMemoryAggregationPort() {
+        return 0;
+    }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisher.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisher.java
new file mode 100644
index 0000000..c8dffab
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisher.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
+/**
+ * Thread that aggregates and publishes metrics to collector on specified interval.
+ */
+public class AggregatedMetricsPublisher extends AbstractMetricPublisher {
+ private static String AGGREGATED_POST_PREFIX = "/aggregated";
+ private Log LOG;
+
+ public AggregatedMetricsPublisher(TimelineMetricsHolder timelineMetricsHolder, Configuration configuration, int interval) {
+ super(timelineMetricsHolder, configuration, interval);
+ LOG = LogFactory.getLog(this.getClass());
+ }
+
+ /**
+ * get metrics map form @TimelineMetricsHolder
+ * @return
+ */
+ @Override
+ protected Map<String, TimelineMetrics> getMetricsFromCache() {
+ return timelineMetricsHolder.extractMetricsForAggregationPublishing();
+ }
+
+ /**
+ * Aggregates given metrics and converts them into json string that will be send to collector
+ * @param metricForAggregationValues
+ * @return
+ */
+ @Override
+ protected String processMetrics(Map<String, TimelineMetrics> metricForAggregationValues) {
+ HashMap<String, TimelineMetrics> nameToMetricMap = new HashMap<>();
+ for (TimelineMetrics timelineMetrics : metricForAggregationValues.values()) {
+ for (TimelineMetric timelineMetric : timelineMetrics.getMetrics()) {
+ if (!nameToMetricMap.containsKey(timelineMetric.getMetricName())) {
+ nameToMetricMap.put(timelineMetric.getMetricName(), new TimelineMetrics());
+ }
+ nameToMetricMap.get(timelineMetric.getMetricName()).addOrMergeTimelineMetric(timelineMetric);
+ }
+ }
+ Set<TimelineMetricWithAggregatedValues> metricAggregateMap = new HashSet<>();
+ for (TimelineMetrics metrics : nameToMetricMap.values()) {
+ double sum = 0;
+ double max = Integer.MIN_VALUE;
+ double min = Integer.MAX_VALUE;
+ int count = 0;
+ for (TimelineMetric metric : metrics.getMetrics()) {
+ for (Double value : metric.getMetricValues().values()) {
+ sum+=value;
+ max = Math.max(max, value);
+ min = Math.min(min, value);
+ count++;
+ }
+ }
+ TimelineMetric tmpMetric = new TimelineMetric(metrics.getMetrics().get(0));
+ tmpMetric.setMetricValues(new TreeMap<Long, Double>());
+ metricAggregateMap.add(new TimelineMetricWithAggregatedValues(tmpMetric, new MetricHostAggregate(sum, count, 0d, max, min)));
+ }
+ String json = null;
+ try {
+ json = mapper.writeValueAsString(new AggregationResult(metricAggregateMap, System.currentTimeMillis()));
+ LOG.debug(json);
+ } catch (Exception e) {
+ LOG.error("Failed to convert result into json", e);
+ }
+
+ return json;
+ }
+
+ @Override
+ protected String getPostUrl() {
+ return BASE_POST_URL + AGGREGATED_POST_PREFIX;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisher.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisher.java b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisher.java
new file mode 100644
index 0000000..89addb7
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/main/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisher.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+
+import java.util.Map;
+
+/**
+ * Publisher that merges all raw (unaggregated) metrics extracted from the
+ * holder into a single TimelineMetrics payload and posts it to the collector.
+ */
+public class RawMetricsPublisher extends AbstractMetricPublisher {
+ private final Log LOG;
+
+ public RawMetricsPublisher(TimelineMetricsHolder timelineMetricsHolder, Configuration configuration, int interval) {
+ super(timelineMetricsHolder, configuration, interval);
+ LOG = LogFactory.getLog(this.getClass());
+ }
+
+
+ /** Raw metrics come from the raw-publishing side of the cache. */
+ @Override
+ protected Map<String, TimelineMetrics> getMetricsFromCache() {
+ return timelineMetricsHolder.extractMetricsForRawPublishing();
+ }
+
+ /**
+ * Merges all cached metrics into one TimelineMetrics object and serializes it.
+ * @param metricValues metrics keyed by cache key
+ * @return the JSON payload, or null if serialization fails (error is logged)
+ */
+ @Override
+ protected String processMetrics(Map<String, TimelineMetrics> metricValues) {
+ //merge everything in one TimelineMetrics object
+ TimelineMetrics timelineMetrics = new TimelineMetrics();
+ for (TimelineMetrics metrics : metricValues.values()) {
+ for (TimelineMetric timelineMetric : metrics.getMetrics())
+ timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
+ }
+ //map TimelineMetrics to json string
+ String json = null;
+ try {
+ json = mapper.writeValueAsString(timelineMetrics);
+ LOG.debug(json);
+ } catch (Exception e) {
+ LOG.error("Failed to convert result into json", e);
+ }
+ return json;
+ }
+
+ /** Raw metrics are posted to the base metrics endpoint (no suffix). */
+ @Override
+ protected String getPostUrl() {
+ return BASE_POST_URL;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplicationTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplicationTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplicationTest.java
new file mode 100644
index 0000000..ea72d17
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorApplicationTest.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.host.aggregator;
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+import java.net.URI;
+
+import static org.easymock.EasyMock.createMockBuilder;
+
+
+/** Tests for AggregatorApplication startup argument handling and URI building. */
+public class AggregatorApplicationTest {
+    @Test
+    public void testMainNotEnoughArguments() {
+        // NOTE(review): the original pattern threw a plain Exception inside
+        // the same try block and then caught it itself, so the test passed
+        // even when main() did NOT fail. Assert.fail() throws an Error, which
+        // the catch (Exception) clause does not swallow.
+        try {
+            AggregatorApplication.main(new String[0]);
+            Assert.fail("main() should fail when no arguments are provided");
+        } catch (Exception e) {
+            //expected
+        }
+        try {
+            AggregatorApplication.main(new String[1]);
+            Assert.fail("main() should fail when only one argument is provided");
+        } catch (Exception e) {
+            //expected
+        }
+    }
+
+    /** getURI() should point at the local hostname on the default port 61888. */
+    @Test
+    public void testGetURI() {
+        AggregatorApplication aggregatorApplicationMock = createMockBuilder(AggregatorApplication.class)
+                .withConstructor("", "")
+                .addMockedMethod("createHttpServer")
+                .addMockedMethod("initConfiguration").createMock();
+
+        URI uri = aggregatorApplicationMock.getURI();
+        Assert.assertEquals("http://" + aggregatorApplicationMock.getHostName() + ":61888/", uri.toString());
+    }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebServiceTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebServiceTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebServiceTest.java
new file mode 100644
index 0000000..736fd06
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/AggregatorWebServiceTest.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.host.aggregator;
+
+
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+import com.sun.jersey.test.framework.spi.container.TestContainerFactory;
+import com.sun.jersey.test.framework.spi.container.grizzly2.GrizzlyTestContainerFactory;
+import junit.framework.Assert;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
+import org.junit.Test;
+
+
+import javax.ws.rs.core.MediaType;
+
+import java.util.Collection;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+
+
+/**
+ * End-to-end test of the aggregator's REST endpoint using an in-process
+ * Grizzly container: verifies GET responses, 404 on unknown paths, and that
+ * a POSTed TimelineMetrics payload lands in both sides of the holder cache.
+ */
+public class AggregatorWebServiceTest extends JerseyTest {
+ public AggregatorWebServiceTest() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.metrics2.host.aggregator")
+ .contextPath("jersey-guice-filter")
+ .servletPath("/")
+ .clientConfig(new DefaultClientConfig(JacksonJaxbJsonProvider.class))
+ .build());
+ }
+
+ @Override
+ public TestContainerFactory getTestContainerFactory() {
+ return new GrizzlyTestContainerFactory();
+ }
+
+ /** GET on the metrics root should respond 200 with a text/json body. */
+ @Test
+ public void testOkResponse() {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("timeline").path("metrics")
+ .accept("text/json")
+ .get(ClientResponse.class);
+ assertEquals(200, response.getStatus());
+ assertEquals("text/json", response.getType().toString());
+ }
+
+ /** GET on /aggregated is not a served resource and should 404. */
+ @Test
+ public void testWrongPath() {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("timeline").path("metrics").path("aggregated")
+ .accept("text/json")
+ .get(ClientResponse.class);
+ assertEquals(404, response.getStatus());
+ }
+
+
+ @Test
+ public void testMetricsPost() {
+ TimelineMetricsHolder timelineMetricsHolder = TimelineMetricsHolder.getInstance();
+
+ // Drain both caches first so leftovers from other tests on the shared
+ // singleton cannot affect the counts below.
+ timelineMetricsHolder.extractMetricsForAggregationPublishing();
+ timelineMetricsHolder.extractMetricsForRawPublishing();
+
+ TimelineMetrics timelineMetrics = TimelineMetricsHolderTest.getTimelineMetricsWithAppID("appid");
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("timeline").path("metrics")
+ .accept(MediaType.TEXT_PLAIN)
+ .post(ClientResponse.class, timelineMetrics);
+ assertEquals(200, response.getStatus());
+ assertEquals(MediaType.TEXT_PLAIN, response.getType().toString());
+
+ // The POST should have populated both the aggregation and raw caches.
+ Map<String, TimelineMetrics> aggregationMap = timelineMetricsHolder.extractMetricsForAggregationPublishing();
+ Map<String, TimelineMetrics> rawMap = timelineMetricsHolder.extractMetricsForRawPublishing();
+
+ Assert.assertEquals(1, aggregationMap.size());
+ Assert.assertEquals(1, rawMap.size());
+
+ Collection<TimelineMetrics> aggregationCollection = aggregationMap.values();
+ Collection<TimelineMetrics> rawCollection = rawMap.values();
+
+ Collection<String> aggregationCollectionKeys = aggregationMap.keySet();
+ Collection<String> rawCollectionKeys = rawMap.keySet();
+
+ // Cache keys embed the app id (see TimelineMetricsHolder key calculation).
+ for (String key : aggregationCollectionKeys) {
+ Assert.assertTrue(key.contains("appid"));
+ }
+
+ for (String key : rawCollectionKeys) {
+ Assert.assertTrue(key.contains("appid"));
+ }
+
+ Assert.assertEquals(1, aggregationCollection.size());
+ Assert.assertEquals(1, rawCollection.size());
+
+ TimelineMetrics aggregationTimelineMetrics = (TimelineMetrics) aggregationCollection.toArray()[0];
+ TimelineMetrics rawTimelineMetrics = (TimelineMetrics) rawCollection.toArray()[0];
+
+
+ Assert.assertEquals(1, aggregationTimelineMetrics.getMetrics().size());
+ Assert.assertEquals(1, rawTimelineMetrics.getMetrics().size());
+
+ Assert.assertEquals("appid", aggregationTimelineMetrics.getMetrics().get(0).getAppId());
+ Assert.assertEquals("appid", rawTimelineMetrics.getMetrics().get(0).getAppId());
+
+ aggregationMap = timelineMetricsHolder.extractMetricsForAggregationPublishing();
+ rawMap = timelineMetricsHolder.extractMetricsForRawPublishing();
+
+ //Cache should be empty after extraction
+ Assert.assertEquals(0, aggregationMap.size());
+ Assert.assertEquals(0, rawMap.size());
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolderTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolderTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolderTest.java
new file mode 100644
index 0000000..7d8ebf4
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/host/aggregator/TimelineMetricsHolderTest.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.host.aggregator;
+
+import junit.framework.Assert;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+import java.util.Collection;
+import java.util.Map;
+
+
+/** Tests for the TimelineMetricsHolder singleton cache semantics. */
+public class TimelineMetricsHolderTest {
+    private TimelineMetricsHolder timelineMetricsHolderInstance;
+
+    /**
+     * Resets the TimelineMetricsHolder singleton between tests via reflection.
+     * The "instance" field is static, so the target object for set() is null;
+     * the original passed the Field object itself, which is ignored for static
+     * fields but misleading (and would fail for an instance field).
+     */
+    public void clearHolderSingleton() throws NoSuchFieldException, IllegalAccessException {
+        Class timelineMetricHolderClass = TimelineMetricsHolder.class;
+        Field field = timelineMetricHolderClass.getDeclaredField("instance");
+        field.setAccessible(true);
+        field.set(null, null);
+    }
+
+    @Test
+    public void testGetInstanceDefaultValues() throws Exception {
+        clearHolderSingleton();
+        Assert.assertNotNull(TimelineMetricsHolder.getInstance());
+    }
+
+    @Test
+    public void testGetInstanceWithParameters() throws Exception {
+        clearHolderSingleton();
+        Assert.assertNotNull(TimelineMetricsHolder.getInstance(1, 2));
+    }
+
+    @Test
+    public void testCache() throws Exception {
+        clearHolderSingleton();
+        timelineMetricsHolderInstance = TimelineMetricsHolder.getInstance(4, 4);
+        timelineMetricsHolderInstance.putMetricsForAggregationPublishing(getTimelineMetricsWithAppID("aggr"));
+        timelineMetricsHolderInstance.putMetricsForRawPublishing(getTimelineMetricsWithAppID("raw"));
+
+        Map<String, TimelineMetrics> aggregationMap = timelineMetricsHolderInstance.extractMetricsForAggregationPublishing();
+        Map<String, TimelineMetrics> rawMap = timelineMetricsHolderInstance.extractMetricsForRawPublishing();
+
+        Assert.assertEquals(1, aggregationMap.size());
+        Assert.assertEquals(1, rawMap.size());
+
+        Collection<TimelineMetrics> aggregationCollection = aggregationMap.values();
+        Collection<TimelineMetrics> rawCollection = rawMap.values();
+
+        Collection<String> aggregationCollectionKeys = aggregationMap.keySet();
+        Collection<String> rawCollectionKeys = rawMap.keySet();
+
+        // Cache keys embed the app id of the stored metrics.
+        for (String key : aggregationCollectionKeys) {
+            Assert.assertTrue(key.contains("aggr"));
+        }
+
+        for (String key : rawCollectionKeys) {
+            Assert.assertTrue(key.contains("raw"));
+        }
+
+        Assert.assertEquals(1, aggregationCollection.size());
+        Assert.assertEquals(1, rawCollection.size());
+
+        TimelineMetrics aggregationTimelineMetrics = (TimelineMetrics) aggregationCollection.toArray()[0];
+        TimelineMetrics rawTimelineMetrics = (TimelineMetrics) rawCollection.toArray()[0];
+
+        Assert.assertEquals(1, aggregationTimelineMetrics.getMetrics().size());
+        Assert.assertEquals(1, rawTimelineMetrics.getMetrics().size());
+
+        Assert.assertEquals("aggr", aggregationTimelineMetrics.getMetrics().get(0).getAppId());
+        Assert.assertEquals("raw", rawTimelineMetrics.getMetrics().get(0).getAppId());
+
+        aggregationMap = timelineMetricsHolderInstance.extractMetricsForAggregationPublishing();
+        rawMap = timelineMetricsHolderInstance.extractMetricsForRawPublishing();
+
+        //Cache should be empty after extraction
+        Assert.assertEquals(0, aggregationMap.size());
+        Assert.assertEquals(0, rawMap.size());
+    }
+
+    /** Builds a TimelineMetrics holding a single metric tagged with the given app id. */
+    public static TimelineMetrics getTimelineMetricsWithAppID(String appId) {
+        TimelineMetric timelineMetric = new TimelineMetric();
+        timelineMetric.setAppId(appId);
+        TimelineMetrics timelineMetrics = new TimelineMetrics();
+        timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
+        return timelineMetrics;
+    }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisherTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisherTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisherTest.java
new file mode 100644
index 0000000..a8ddbee
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AbstractMetricPublisherTest.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolderTest;
+import org.junit.Test;
+
+import java.util.Map;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.anyString;
+import static org.easymock.EasyMock.createMockBuilder;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+/**
+ * Tests AbstractMetricPublisher's publish pipeline and its run loop, using
+ * EasyMock partial mocks of the concrete RawMetricsPublisher.
+ */
+public class AbstractMetricPublisherTest {
+ /** processAndPublishMetrics should resolve host, build URI, convert and emit exactly once. */
+ @Test
+ public void testProcessAndPublishMetrics() throws Exception {
+ AbstractMetricPublisher publisherMock =
+ createMockBuilder(RawMetricsPublisher.class)
+ .withConstructor(TimelineMetricsHolder.getInstance(), new Configuration(), 60)
+ .addMockedMethod("processMetrics")
+ .addMockedMethod("getCollectorUri")
+ .addMockedMethod("emitMetricsJson")
+ .addMockedMethod("getCurrentCollectorHost").createStrictMock();
+
+ TimelineMetricsHolder.getInstance().putMetricsForRawPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("raw"));
+ // Strict mock: calls must occur in this exact order.
+ expect(publisherMock.getCurrentCollectorHost()).andReturn("collectorhost").once();
+ expect(publisherMock.getCollectorUri(anyString())).andReturn("https://collectorhost:11/metrics").once();
+ expect(publisherMock.processMetrics(anyObject(Map.class))).andReturn("{metrics}").once();
+ expect(publisherMock.emitMetricsJson("https://collectorhost:11/metrics", "{metrics}")).andReturn(true).once();
+
+ replay(publisherMock);
+
+ publisherMock.processAndPublishMetrics(TimelineMetricsHolder.getInstance().extractMetricsForRawPublishing());
+
+ verify(publisherMock);
+ }
+
+ /** The run loop should publish once per interval and exit once isInterrupted() reports true. */
+ @Test
+ public void testRunAndStop() throws Exception {
+ // Publisher with a 1-second interval; only the publish step is mocked.
+ AbstractMetricPublisher publisherMock = createMockBuilder(RawMetricsPublisher.class)
+ .withConstructor(TimelineMetricsHolder.getInstance(), new Configuration(), 1)
+ .addMockedMethod("processAndPublishMetrics").createStrictMock();
+ publisherMock.processAndPublishMetrics(anyObject(Map.class));
+ expectLastCall().times(1);
+
+
+ // Mock the thread's own isInterrupted() so the loop sees false once
+ // (one iteration) and then true (exit).
+ Thread t = createMockBuilder(Thread.class)
+ .withConstructor(publisherMock)
+ .addMockedMethod("isInterrupted").createStrictMock();
+ expect(t.isInterrupted()).andReturn(false).once();
+ expect(t.isInterrupted()).andReturn(true).once();
+
+ replay(publisherMock, t);
+
+ t.start();
+
+ // NOTE(review): fixed real-time sleep (>2x the 1s interval) to let the
+ // loop run; timing-based and potentially flaky on slow machines.
+ Thread.sleep(2222);
+
+ verify(publisherMock, t);
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisherTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisherTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisherTest.java
new file mode 100644
index 0000000..3413052
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AggregatedMetricsPublisherTest.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import junit.framework.Assert;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolderTest;
+import org.junit.Test;
+
+import org.apache.hadoop.conf.Configuration;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class AggregatedMetricsPublisherTest {
+
+ @Test
+ public void testProcessMetrics() throws Exception {
+ Configuration configuration = new Configuration();
+ TimelineMetricsHolder timelineMetricsHolder = TimelineMetricsHolder.getInstance();
+ timelineMetricsHolder.extractMetricsForAggregationPublishing();
+ timelineMetricsHolder.extractMetricsForRawPublishing();
+
+ TreeMap<Long, Double> metric1App1Metrics = new TreeMap<>();
+ metric1App1Metrics.put(1L, 1d);
+ metric1App1Metrics.put(2L, 2d);
+ metric1App1Metrics.put(3L, 3d);
+ timelineMetricsHolder.putMetricsForAggregationPublishing(getTimelineMetricsForAppId("metricName1", "app1", metric1App1Metrics));
+
+ TreeMap<Long, Double> metric2App2Metrics = new TreeMap<>();
+ metric2App2Metrics.put(1L, 4d);
+ metric2App2Metrics.put(2L, 5d);
+ metric2App2Metrics.put(3L, 6d);
+ timelineMetricsHolder.putMetricsForAggregationPublishing(getTimelineMetricsForAppId("metricName2", "app2", metric2App2Metrics));
+
+ TreeMap<Long, Double> metric3App3Metrics = new TreeMap<>();
+ metric3App3Metrics.put(1L, 7d);
+ metric3App3Metrics.put(2L, 8d);
+ metric3App3Metrics.put(3L, 9d);
+
+ timelineMetricsHolder.putMetricsForAggregationPublishing(getTimelineMetricsForAppId("metricName3", "app3", metric3App3Metrics));
+
+
+ AggregatedMetricsPublisher aggregatedMetricsPublisher =
+ new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 60);
+
+ String aggregatedJson = aggregatedMetricsPublisher.processMetrics(timelineMetricsHolder.extractMetricsForAggregationPublishing());
+ String expectedMetric1App1 = "{\"timelineMetric\":{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName1\",\"appid\":\"app1\",\"starttime\":0,\"metrics\":{}},\"metricAggregate\":{\"sum\":6.0,\"deviation\":0.0,\"max\":3.0,\"min\":1.0,\"numberOfSamples\":3}}";
+ String expectedMetric2App2 = "{\"timelineMetric\":{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName2\",\"appid\":\"app2\",\"starttime\":0,\"metrics\":{}},\"metricAggregate\":{\"sum\":15.0,\"deviation\":0.0,\"max\":6.0,\"min\":4.0,\"numberOfSamples\":3}}";
+ String expectedMetric3App3 = "{\"timelineMetric\":{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName3\",\"appid\":\"app3\",\"starttime\":0,\"metrics\":{}},\"metricAggregate\":{\"sum\":24.0,\"deviation\":0.0,\"max\":9.0,\"min\":7.0,\"numberOfSamples\":3}}";
+ Assert.assertNotNull(aggregatedJson);
+ Assert.assertTrue(aggregatedJson.contains(expectedMetric1App1));
+ Assert.assertTrue(aggregatedJson.contains(expectedMetric3App3));
+ Assert.assertTrue(aggregatedJson.contains(expectedMetric2App2));
+ }
+
+ @Test
+ public void testGetPostUrl() {
+ Configuration configuration = new Configuration();
+ AggregatedMetricsPublisher aggregatedMetricsPublisher =
+ new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+ String actualURL = aggregatedMetricsPublisher.getPostUrl();
+ String expectedURL = "%s://%s:%s/ws/v1/timeline/metrics/aggregated";
+ Assert.assertNotNull(actualURL);
+ Assert.assertEquals(expectedURL, actualURL);
+ }
+
+ @Test
+ public void testGetCollectorUri() {
+ //default configuration
+ Configuration configuration = new Configuration();
+ AbstractMetricPublisher aggregatedMetricsPublisher =
+ new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+ String actualURL = aggregatedMetricsPublisher.getCollectorUri("c6401.ambari.apache.org");
+ String expectedURL = "http://c6401.ambari.apache.org:6188/ws/v1/timeline/metrics/aggregated";
+ Assert.assertNotNull(actualURL);
+ Assert.assertEquals(expectedURL, actualURL);
+
+ //https configuration
+ configuration = new Configuration();
+ configuration.set("timeline.metrics.service.http.policy", "HTTPS_ONLY");
+ aggregatedMetricsPublisher =
+ new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+ actualURL = aggregatedMetricsPublisher.getCollectorUri("c6402.ambari.apache.org");
+ expectedURL = "https://c6402.ambari.apache.org:6188/ws/v1/timeline/metrics/aggregated";
+ Assert.assertNotNull(actualURL);
+ Assert.assertEquals(expectedURL, actualURL);
+
+ //custom port configuration
+ configuration = new Configuration();
+ configuration.set("timeline.metrics.service.webapp.address", "0.0.0.0:8888");
+ aggregatedMetricsPublisher =
+ new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+ actualURL = aggregatedMetricsPublisher.getCollectorUri("c6403.ambari.apache.org");
+ expectedURL = "http://c6403.ambari.apache.org:8888/ws/v1/timeline/metrics/aggregated";
+ Assert.assertNotNull(actualURL);
+ Assert.assertEquals(expectedURL, actualURL);
+ }
+
+ @Test
+ public void testGetMetricsFromCache() throws InterruptedException {
+ TimelineMetricsHolder timelineMetricsHolder = TimelineMetricsHolder.getInstance(4,4);
+ timelineMetricsHolder.extractMetricsForAggregationPublishing();
+ timelineMetricsHolder.extractMetricsForRawPublishing();
+
+ timelineMetricsHolder.putMetricsForAggregationPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("aggr1"));
+ timelineMetricsHolder.putMetricsForRawPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("raw"));
+ timelineMetricsHolder.putMetricsForAggregationPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("aggr2"));
+
+ Configuration configuration = new Configuration();
+ AggregatedMetricsPublisher aggregatedMetricsPublisher =
+ new AggregatedMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+
+ Map<String, TimelineMetrics> metricsFromCache = aggregatedMetricsPublisher.getMetricsFromCache();
+ Assert.assertNotNull(metricsFromCache);
+ Collection<TimelineMetrics> actualTimelineMetrics = metricsFromCache.values();
+ Assert.assertNotNull(actualTimelineMetrics);
+ Assert.assertEquals(2, actualTimelineMetrics.size());
+
+ for (TimelineMetrics timelineMetrics : actualTimelineMetrics) {
+ List<TimelineMetric> metrics = timelineMetrics.getMetrics();
+ Assert.assertEquals(1, metrics.size());
+ Assert.assertTrue(metrics.get(0).getAppId().contains("aggr"));
+ }
+
+ }
+
+ TimelineMetrics getTimelineMetricsForAppId(String metricName, String appId, TreeMap<Long, Double> metricValues) {
+ TimelineMetric timelineMetric = new TimelineMetric();
+ timelineMetric.setMetricName(metricName);
+ timelineMetric.setAppId(appId);
+ timelineMetric.setMetricValues(metricValues);
+ TimelineMetrics timelineMetrics = new TimelineMetrics();
+ timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
+ return timelineMetrics;
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisherTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisherTest.java b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisherTest.java
new file mode 100644
index 0000000..60510d2
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-host-aggregator/src/test/java/org/apache/hadoop/metrics2/sink/timeline/RawMetricsPublisherTest.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import junit.framework.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolder;
+import org.apache.hadoop.metrics2.host.aggregator.TimelineMetricsHolderTest;
+import org.junit.Test;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+
+public class RawMetricsPublisherTest {
+ @Test
+ public void testProcessMetrics() throws Exception {
+ Configuration configuration = new Configuration();
+ TimelineMetricsHolder timelineMetricsHolder = TimelineMetricsHolder.getInstance();
+
+ timelineMetricsHolder.extractMetricsForAggregationPublishing();
+ timelineMetricsHolder.extractMetricsForRawPublishing();
+
+ TreeMap<Long, Double> metric1App1Metrics = new TreeMap<>();
+ metric1App1Metrics.put(1L, 1d);
+ metric1App1Metrics.put(2L, 2d);
+ metric1App1Metrics.put(3L, 3d);
+ timelineMetricsHolder.putMetricsForRawPublishing(getTimelineMetricsForAppId("metricName1", "app1", metric1App1Metrics));
+
+ TreeMap<Long, Double> metric2App2Metrics = new TreeMap<>();
+ metric2App2Metrics.put(1L, 4d);
+ metric2App2Metrics.put(2L, 5d);
+ metric2App2Metrics.put(3L, 6d);
+ timelineMetricsHolder.putMetricsForRawPublishing(getTimelineMetricsForAppId("metricName2", "app2", metric2App2Metrics));
+
+ TreeMap<Long, Double> metric3App3Metrics = new TreeMap<>();
+ metric3App3Metrics.put(1L, 7d);
+ metric3App3Metrics.put(2L, 8d);
+ metric3App3Metrics.put(3L, 9d);
+
+ timelineMetricsHolder.putMetricsForRawPublishing(getTimelineMetricsForAppId("metricName3", "app3", metric3App3Metrics));
+
+
+ RawMetricsPublisher rawMetricsPublisher =
+ new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 60);
+
+ String rawJson = rawMetricsPublisher.processMetrics(timelineMetricsHolder.extractMetricsForRawPublishing());
+ String expectedResult = "{\"metrics\":[{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName1\",\"appid\":\"app1\",\"starttime\":0,\"metrics\":{\"1\":1.0,\"2\":2.0,\"3\":3.0}},{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName2\",\"appid\":\"app2\",\"starttime\":0,\"metrics\":{\"1\":4.0,\"2\":5.0,\"3\":6.0}},{\"timestamp\":0,\"metadata\":{},\"metricname\":\"metricName3\",\"appid\":\"app3\",\"starttime\":0,\"metrics\":{\"1\":7.0,\"2\":8.0,\"3\":9.0}}]}";
+ Assert.assertNotNull(rawJson);
+ Assert.assertEquals(expectedResult, rawJson);
+ }
+
+ @Test
+ public void testGetPostUrl() {
+ Configuration configuration = new Configuration();
+ RawMetricsPublisher rawMetricsPublisher =
+ new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+ String actualURL = rawMetricsPublisher.getPostUrl();
+ String expectedURL = "%s://%s:%s/ws/v1/timeline/metrics";
+ Assert.assertNotNull(actualURL);
+ Assert.assertEquals(expectedURL, actualURL);
+ }
+
+ @Test
+ public void testGetCollectorUri() {
+ //default configuration
+ Configuration configuration = new Configuration();
+ AbstractMetricPublisher rawMetricsPublisher =
+ new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+ String actualURL = rawMetricsPublisher.getCollectorUri("c6401.ambari.apache.org");
+ String expectedURL = "http://c6401.ambari.apache.org:6188/ws/v1/timeline/metrics";
+ Assert.assertNotNull(actualURL);
+ Assert.assertEquals(expectedURL, actualURL);
+
+ //https configuration
+ configuration = new Configuration();
+ configuration.set("timeline.metrics.service.http.policy", "HTTPS_ONLY");
+ rawMetricsPublisher =
+ new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+ actualURL = rawMetricsPublisher.getCollectorUri("c6402.ambari.apache.org");
+ expectedURL = "https://c6402.ambari.apache.org:6188/ws/v1/timeline/metrics";
+ Assert.assertNotNull(actualURL);
+ Assert.assertEquals(expectedURL, actualURL);
+
+ //custom port configuration
+ configuration = new Configuration();
+ configuration.set("timeline.metrics.service.webapp.address", "0.0.0.0:8888");
+ rawMetricsPublisher =
+ new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+ actualURL = rawMetricsPublisher.getCollectorUri("c6403.ambari.apache.org");
+ expectedURL = "http://c6403.ambari.apache.org:8888/ws/v1/timeline/metrics";
+ Assert.assertNotNull(actualURL);
+ Assert.assertEquals(expectedURL, actualURL);
+ }
+
+ @Test
+ public void testGetMetricsFromCache() throws InterruptedException {
+
+ TimelineMetricsHolder timelineMetricsHolder = TimelineMetricsHolder.getInstance(4,4);
+ timelineMetricsHolder.extractMetricsForAggregationPublishing();
+ timelineMetricsHolder.extractMetricsForRawPublishing();
+
+ timelineMetricsHolder.putMetricsForRawPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("raw1"));
+ timelineMetricsHolder.putMetricsForAggregationPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("aggr"));
+ timelineMetricsHolder.putMetricsForRawPublishing(TimelineMetricsHolderTest.getTimelineMetricsWithAppID("raw2"));
+
+ Configuration configuration = new Configuration();
+ RawMetricsPublisher rawMetricsPublisher =
+ new RawMetricsPublisher(TimelineMetricsHolder.getInstance(), configuration, 1);
+
+ Map<String, TimelineMetrics> metricsFromCache = rawMetricsPublisher.getMetricsFromCache();
+ Assert.assertNotNull(metricsFromCache);
+ Collection<TimelineMetrics> actualTimelineMetrics = metricsFromCache.values();
+ Assert.assertNotNull(actualTimelineMetrics);
+ Assert.assertEquals(2, actualTimelineMetrics.size());
+
+ for (TimelineMetrics timelineMetrics : actualTimelineMetrics) {
+ List<TimelineMetric> metrics = timelineMetrics.getMetrics();
+ Assert.assertEquals(1, metrics.size());
+ Assert.assertTrue(metrics.get(0).getAppId().contains("raw"));
+ }
+
+ }
+
+ TimelineMetrics getTimelineMetricsForAppId(String metricName, String appId, TreeMap<Long, Double> metricValues) {
+ TimelineMetric timelineMetric = new TimelineMetric();
+ timelineMetric.setMetricName(metricName);
+ timelineMetric.setAppId(appId);
+ timelineMetric.setMetricValues(metricValues);
+ TimelineMetrics timelineMetrics = new TimelineMetrics();
+ timelineMetrics.addOrMergeTimelineMetric(timelineMetric);
+ return timelineMetrics;
+ }
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/aggregator.py
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/aggregator.py b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/aggregator.py
index 2249e53..ba05e9b 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/aggregator.py
+++ b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/aggregator.py
@@ -42,9 +42,10 @@ class Aggregator(threading.Thread):
ams_log_file = "ambari-metrics-aggregator.log"
additional_classpath = ':{0}'.format(config_dir)
ams_log_dir = self._config.ams_monitor_log_dir()
+ hostname = self._config.get_hostname_config()
logger.info('Starting Aggregator thread.')
- cmd = "{0}/bin/java {1} -Dams.log.dir={2} -Dams.log.file={3} -cp /var/lib/ambari-metrics-monitor/lib/*{4} {5} {6}"\
- .format(java_home, jvm_agrs, ams_log_dir, ams_log_file, additional_classpath, class_name, collector_hosts)
+ cmd = "{0}/bin/java {1} -Dams.log.dir={2} -Dams.log.file={3} -cp /var/lib/ambari-metrics-monitor/lib/*{4} {5} {6} {7}"\
+ .format(java_home, jvm_agrs, ams_log_dir, ams_log_file, additional_classpath, class_name, hostname, collector_hosts)
logger.info("Executing : {0}".format(cmd))
@@ -60,6 +61,7 @@ class Aggregator(threading.Thread):
if self._aggregator_process :
logger.info('Stopping Aggregator thread.')
self._aggregator_process.terminate()
+ self._aggregator_process = None
class AggregatorWatchdog(threading.Thread):
SLEEP_TIME = 30
[9/9] ambari git commit: Merge branch 'trunk' into
branch-feature-AMBARI-20859
Posted by rl...@apache.org.
Merge branch 'trunk' into branch-feature-AMBARI-20859
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/ebe39390
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/ebe39390
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/ebe39390
Branch: refs/heads/branch-feature-AMBARI-20859
Commit: ebe3939006a3080870355be39a9d3841908718bd
Parents: 103dfff 29f7508
Author: Robert Levas <rl...@hortonworks.com>
Authored: Fri Jun 9 14:46:19 2017 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Fri Jun 9 14:46:19 2017 -0400
----------------------------------------------------------------------
ambari-metrics/ambari-metrics-common/pom.xml | 5 +
.../timeline/AbstractTimelineMetricsSink.java | 64 +++++-
.../sink/timeline/AppCookieManager.java | 219 +++++++++++++++++++
.../sink/timeline/AppCookieManagerTest.java | 52 +++++
.../ambari-metrics-host-aggregator/pom.xml | 30 ++-
.../AbstractMetricPublisherThread.java | 134 ------------
.../aggregator/AggregatedMetricsPublisher.java | 101 ---------
.../host/aggregator/AggregatorApplication.java | 98 ++++++---
.../host/aggregator/AggregatorWebService.java | 2 +-
.../host/aggregator/RawMetricsPublisher.java | 60 -----
.../host/aggregator/TimelineMetricsHolder.java | 26 ++-
.../sink/timeline/AbstractMetricPublisher.java | 169 ++++++++++++++
.../timeline/AggregatedMetricsPublisher.java | 103 +++++++++
.../sink/timeline/RawMetricsPublisher.java | 65 ++++++
.../aggregator/AggregatorApplicationTest.java | 55 +++++
.../aggregator/AggregatorWebServiceTest.java | 135 ++++++++++++
.../aggregator/TimelineMetricsHolderTest.java | 107 +++++++++
.../timeline/AbstractMetricPublisherTest.java | 82 +++++++
.../AggregatedMetricsPublisherTest.java | 154 +++++++++++++
.../sink/timeline/RawMetricsPublisherTest.java | 151 +++++++++++++
.../src/main/python/core/aggregator.py | 6 +-
.../src/main/python/core/controller.py | 2 +-
.../main/resources/Ambari-DDL-Oracle-CREATE.sql | 12 +-
.../0.1.0/configuration/ams-hbase-env.xml | 4 +-
.../package/templates/hbase_master_jaas.conf.j2 | 10 +
.../templates/hbase_regionserver_jaas.conf.j2 | 10 +
.../package/templates/hbase_master_jaas.conf.j2 | 10 +
.../templates/hbase_regionserver_jaas.conf.j2 | 10 +
.../HBASE/2.0.0.3.0/configuration/hbase-env.xml | 4 +-
.../package/templates/hbase_master_jaas.conf.j2 | 10 +
.../templates/hbase_regionserver_jaas.conf.j2 | 10 +
.../HDFS/2.1.0.2.0/package/scripts/hdfs.py | 17 ++
.../package/templates/hdfs_dn_jaas.conf.j2 | 27 +++
.../package/templates/hdfs_jn_jaas.conf.j2 | 27 +++
.../package/templates/hdfs_nn_jaas.conf.j2 | 27 +++
.../HDFS/3.0.0.3.0/package/scripts/hdfs.py | 17 ++
.../package/templates/hdfs_dn_jaas.conf.j2 | 27 +++
.../package/templates/hdfs_jn_jaas.conf.j2 | 27 +++
.../package/templates/hdfs_nn_jaas.conf.j2 | 27 +++
.../KAFKA/0.8.1/configuration/kafka-env.xml | 4 +
.../0.8.1/configuration/kafka_jaas_conf.xml | 11 +
.../0.8.1/package/templates/kafka_jaas.conf.j2 | 11 +
.../0.9.1/package/scripts/storm_yaml_utils.py | 5 +-
.../0.9.1/package/templates/storm_jaas.conf.j2 | 10 +
.../2.1.0.2.0/package/scripts/params_linux.py | 32 ++-
.../YARN/2.1.0.2.0/package/scripts/yarn.py | 17 ++
.../package/templates/mapred_jaas.conf.j2 | 28 +++
.../package/templates/yarn_ats_jaas.conf.j2 | 27 +++
.../package/templates/yarn_jaas.conf.j2 | 12 +-
.../package/templates/yarn_nm_jaas.conf.j2 | 27 +++
.../configuration-mapred/mapred-env.xml | 4 +-
.../YARN/3.0.0.3.0/configuration/yarn-env.xml | 15 +-
.../3.0.0.3.0/package/scripts/params_linux.py | 32 ++-
.../YARN/3.0.0.3.0/package/scripts/yarn.py | 19 +-
.../package/templates/mapred_jaas.conf.j2 | 28 +++
.../package/templates/yarn_ats_jaas.conf.j2 | 27 +++
.../package/templates/yarn_jaas.conf.j2 | 12 +-
.../package/templates/yarn_nm_jaas.conf.j2 | 27 +++
.../0.6.0.2.5/package/scripts/master.py | 4 +-
.../YARN/configuration-mapred/mapred-env.xml | 4 +-
.../services/HBASE/configuration/hbase-env.xml | 4 +-
.../services/HDFS/configuration/hadoop-env.xml | 7 +
.../services/YARN/configuration/yarn-env.xml | 16 +-
.../services/HDFS/configuration/hadoop-env.xml | 7 +
.../services/HDFS/configuration/hadoop-env.xml | 7 +
.../YARN/configuration-mapred/mapred-env.xml | 4 +-
.../python/stacks/2.0.6/HDFS/test_datanode.py | 10 +
.../stacks/2.0.6/HDFS/test_journalnode.py | 11 +-
.../python/stacks/2.0.6/HDFS/test_namenode.py | 24 +-
.../python/stacks/2.0.6/HDFS/test_nfsgateway.py | 10 +
.../python/stacks/2.0.6/HDFS/test_snamenode.py | 12 +-
.../test/python/stacks/2.0.6/HDFS/test_zkfc.py | 17 +-
.../stacks/2.0.6/YARN/test_historyserver.py | 10 +
.../stacks/2.0.6/YARN/test_mapreduce2_client.py | 10 +
.../stacks/2.0.6/YARN/test_nodemanager.py | 10 +
.../stacks/2.0.6/YARN/test_resourcemanager.py | 10 +
.../stacks/2.0.6/YARN/test_yarn_client.py | 10 +
ambari-web/app/config.js | 6 +-
ambari-web/app/routes/add_service_routes.js | 2 +-
ambari-web/app/routes/main.js | 2 +-
.../app/templates/main/admin/kerberos.hbs | 34 +--
.../main/service/all_services_actions.hbs | 6 +-
ambari-web/app/views/main/admin.js | 14 +-
.../main/admin/stack_upgrade/services_view.js | 2 +-
ambari-web/app/views/main/menu.js | 16 +-
ambari-web/app/views/main/service/item.js | 2 +-
.../admin/stack_upgrade/services_view_test.js | 1 +
docs/pom.xml | 2 +-
88 files changed, 2276 insertions(+), 442 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/ebe39390/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
----------------------------------------------------------------------
[2/9] ambari git commit: AMBARI-19369. Add Kerberos HTTP SPNEGO
authentication support to Hadoop/hbase/kafka/storm sinks (Qin Liu via rlevas)
Posted by rl...@apache.org.
AMBARI-19369. Add Kerberos HTTP SPNEGO authentication support to Hadoop/hbase/kafka/storm sinks (Qin Liu via rlevas)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/4aaf259e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/4aaf259e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/4aaf259e
Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 4aaf259e191344076a88391f5853da4bf85b8a80
Parents: b98f07f
Author: Qin Liu <qi...@gmail.com>
Authored: Thu Jun 8 16:23:34 2017 -0400
Committer: Robert Levas <rl...@hortonworks.com>
Committed: Thu Jun 8 16:23:34 2017 -0400
----------------------------------------------------------------------
ambari-metrics/ambari-metrics-common/pom.xml | 5 +
.../timeline/AbstractTimelineMetricsSink.java | 60 +++++
.../sink/timeline/AppCookieManager.java | 219 +++++++++++++++++++
.../sink/timeline/AppCookieManagerTest.java | 52 +++++
.../0.1.0/configuration/ams-hbase-env.xml | 4 +-
.../package/templates/hbase_master_jaas.conf.j2 | 10 +
.../templates/hbase_regionserver_jaas.conf.j2 | 10 +
.../package/templates/hbase_master_jaas.conf.j2 | 10 +
.../templates/hbase_regionserver_jaas.conf.j2 | 10 +
.../HBASE/2.0.0.3.0/configuration/hbase-env.xml | 4 +-
.../package/templates/hbase_master_jaas.conf.j2 | 10 +
.../templates/hbase_regionserver_jaas.conf.j2 | 10 +
.../HDFS/2.1.0.2.0/package/scripts/hdfs.py | 17 ++
.../package/templates/hdfs_dn_jaas.conf.j2 | 27 +++
.../package/templates/hdfs_jn_jaas.conf.j2 | 27 +++
.../package/templates/hdfs_nn_jaas.conf.j2 | 27 +++
.../HDFS/3.0.0.3.0/package/scripts/hdfs.py | 17 ++
.../package/templates/hdfs_dn_jaas.conf.j2 | 27 +++
.../package/templates/hdfs_jn_jaas.conf.j2 | 27 +++
.../package/templates/hdfs_nn_jaas.conf.j2 | 27 +++
.../KAFKA/0.8.1/configuration/kafka-env.xml | 4 +
.../0.8.1/configuration/kafka_jaas_conf.xml | 11 +
.../0.8.1/package/templates/kafka_jaas.conf.j2 | 11 +
.../0.9.1/package/scripts/storm_yaml_utils.py | 5 +-
.../0.9.1/package/templates/storm_jaas.conf.j2 | 10 +
.../2.1.0.2.0/package/scripts/params_linux.py | 32 ++-
.../YARN/2.1.0.2.0/package/scripts/yarn.py | 17 ++
.../package/templates/mapred_jaas.conf.j2 | 28 +++
.../package/templates/yarn_ats_jaas.conf.j2 | 27 +++
.../package/templates/yarn_jaas.conf.j2 | 12 +-
.../package/templates/yarn_nm_jaas.conf.j2 | 27 +++
.../configuration-mapred/mapred-env.xml | 4 +-
.../YARN/3.0.0.3.0/configuration/yarn-env.xml | 15 +-
.../3.0.0.3.0/package/scripts/params_linux.py | 32 ++-
.../YARN/3.0.0.3.0/package/scripts/yarn.py | 19 +-
.../package/templates/mapred_jaas.conf.j2 | 28 +++
.../package/templates/yarn_ats_jaas.conf.j2 | 27 +++
.../package/templates/yarn_jaas.conf.j2 | 12 +-
.../package/templates/yarn_nm_jaas.conf.j2 | 27 +++
.../YARN/configuration-mapred/mapred-env.xml | 4 +-
.../services/HBASE/configuration/hbase-env.xml | 4 +-
.../services/HDFS/configuration/hadoop-env.xml | 7 +
.../services/YARN/configuration/yarn-env.xml | 16 +-
.../services/HDFS/configuration/hadoop-env.xml | 7 +
.../services/HDFS/configuration/hadoop-env.xml | 7 +
.../YARN/configuration-mapred/mapred-env.xml | 4 +-
.../python/stacks/2.0.6/HDFS/test_datanode.py | 10 +
.../stacks/2.0.6/HDFS/test_journalnode.py | 11 +-
.../python/stacks/2.0.6/HDFS/test_namenode.py | 24 +-
.../python/stacks/2.0.6/HDFS/test_nfsgateway.py | 10 +
.../python/stacks/2.0.6/HDFS/test_snamenode.py | 12 +-
.../test/python/stacks/2.0.6/HDFS/test_zkfc.py | 17 +-
.../stacks/2.0.6/YARN/test_historyserver.py | 10 +
.../stacks/2.0.6/YARN/test_mapreduce2_client.py | 10 +
.../stacks/2.0.6/YARN/test_nodemanager.py | 10 +
.../stacks/2.0.6/YARN/test_resourcemanager.py | 10 +
.../stacks/2.0.6/YARN/test_yarn_client.py | 10 +
57 files changed, 1084 insertions(+), 47 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-metrics/ambari-metrics-common/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/pom.xml b/ambari-metrics/ambari-metrics-common/pom.xml
index 62ae75f..f0d3963 100644
--- a/ambari-metrics/ambari-metrics-common/pom.xml
+++ b/ambari-metrics/ambari-metrics-common/pom.xml
@@ -189,5 +189,10 @@
<artifactId>powermock-module-junit4</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ <version>4.2.5</version>
+ </dependency>
</dependencies>
</project>
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
index a8dc571..fddf4b3 100644
--- a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AbstractTimelineMetricsSink.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.metrics2.sink.timeline.availability.MetricCollectorHAHe
import org.apache.hadoop.metrics2.sink.timeline.availability.MetricCollectorUnavailableException;
import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardHostnameHashingStrategy;
import org.apache.hadoop.metrics2.sink.timeline.availability.MetricSinkWriteShardStrategy;
+import org.apache.http.HttpStatus;
import org.codehaus.jackson.map.AnnotationIntrospector;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.annotate.JsonSerialize;
@@ -83,6 +84,9 @@ public abstract class AbstractTimelineMetricsSink {
public static final String COLLECTOR_LIVE_NODES_PATH = "/ws/v1/timeline/metrics/livenodes";
public static final String INSTANCE_ID_PROPERTY = "instanceId";
public static final String SET_INSTANCE_ID_PROPERTY = "set.instanceId";
+ public static final String COOKIE = "Cookie";
+ private static final String WWW_AUTHENTICATE = "WWW-Authenticate";
+ private static final String NEGOTIATE = "Negotiate";
protected static final AtomicInteger failedCollectorConnectionsCounter = new AtomicInteger(0);
public static int NUMBER_OF_SKIPPED_COLLECTOR_EXCEPTIONS = 100;
@@ -97,6 +101,7 @@ public abstract class AbstractTimelineMetricsSink {
private long lastFailedZkRequestTime = 0l;
private SSLSocketFactory sslSocketFactory;
+ private AppCookieManager appCookieManager = null;
protected final Log LOG;
@@ -157,6 +162,18 @@ public abstract class AbstractTimelineMetricsSink {
connection = connectUrl.startsWith("https") ?
getSSLConnection(connectUrl) : getConnection(connectUrl);
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("emitMetricsJson to " + connectUrl + ", " + jsonData);
+ }
+ AppCookieManager appCookieManager = getAppCookieManager();
+ String appCookie = appCookieManager.getCachedAppCookie(connectUrl);
+ if (appCookie != null) {
+ if (LOG.isInfoEnabled()) {
+ LOG.info("Using cached app cookie for URL:" + connectUrl);
+ }
+ connection.setRequestProperty(COOKIE, appCookie);
+ }
+
connection.setRequestMethod("POST");
connection.setRequestProperty("Content-Type", "application/json");
connection.setRequestProperty("Connection", "Keep-Alive");
@@ -171,6 +188,37 @@ public abstract class AbstractTimelineMetricsSink {
}
int statusCode = connection.getResponseCode();
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("emitMetricsJson: statusCode = " + statusCode);
+ }
+
+ if (statusCode == HttpStatus.SC_UNAUTHORIZED ) {
+ String wwwAuthHeader = connection.getHeaderField(WWW_AUTHENTICATE);
+ if (LOG.isInfoEnabled()) {
+ LOG.info("Received WWW-Authentication header:" + wwwAuthHeader + ", for URL:" + connectUrl);
+ }
+ if (wwwAuthHeader != null && wwwAuthHeader.trim().startsWith(NEGOTIATE)) {
+ appCookie = appCookieManager.getAppCookie(connectUrl, true);
+ if (appCookie != null) {
+ connection.setRequestProperty(COOKIE, appCookie);
+
+ if (jsonData != null) {
+ try (OutputStream os = connection.getOutputStream()) {
+ os.write(jsonData.getBytes("UTF-8"));
+ }
+ }
+
+ statusCode = connection.getResponseCode();
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("emitMetricsJson: statusCode2 = " + statusCode);
+ }
+ }
+ } else {
+ // no supported authentication type found
+ // we would let the original response propagate
+ LOG.error("Unsupported WWW-Authentication header:" + wwwAuthHeader+ ", for URL:" + connectUrl);
+ }
+ }
if (statusCode != 200) {
LOG.info("Unable to POST metrics to collector, " + connectUrl + ", " +
@@ -265,6 +313,18 @@ public abstract class AbstractTimelineMetricsSink {
}
/**
+ * Get the associated app cookie manager.
+ *
+ * @return the app cookie manager
+ */
+ public synchronized AppCookieManager getAppCookieManager() {
+ if (appCookieManager == null) {
+ appCookieManager = new AppCookieManager();
+ }
+ return appCookieManager;
+ }
+
+ /**
* Cleans up and closes an input stream
* see http://docs.oracle.com/javase/6/docs/technotes/guides/net/http-keepalive.html
* @param is the InputStream to clean up
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java
new file mode 100644
index 0000000..bcba238
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-common/src/main/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManager.java
@@ -0,0 +1,219 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import java.io.IOException;
+import java.net.URI;
+import java.security.Principal;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.http.Header;
+import org.apache.http.HeaderElement;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpRequest;
+import org.apache.http.HttpResponse;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.Credentials;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpOptions;
+import org.apache.http.client.methods.HttpUriRequest;
+import org.apache.http.client.params.AuthPolicy;
+import org.apache.http.impl.auth.SPNegoSchemeFactory;
+import org.apache.http.impl.client.DefaultHttpClient;
+import org.apache.http.util.EntityUtils;
+
+/**
+ * Handles SPNego authentication as a client of hadoop service, caches
+ * hadoop.auth cookie returned by hadoop service on successful SPNego
+ * authentication. Refreshes hadoop.auth cookie on demand if the cookie has
+ * expired.
+ *
+ */
+public class AppCookieManager {
+
+ static final String HADOOP_AUTH = "hadoop.auth";
+ private static final String HADOOP_AUTH_EQ = "hadoop.auth=";
+ private static final String SET_COOKIE = "Set-Cookie";
+
+ private static final EmptyJaasCredentials EMPTY_JAAS_CREDENTIALS = new EmptyJaasCredentials();
+
+ private Map<String, String> endpointCookieMap = new ConcurrentHashMap<String, String>();
+ private static Log LOG = LogFactory.getLog(AppCookieManager.class);
+
+ /**
+ * Utility method to exercise AppCookieManager directly
+ * @param args element 0 of args should be a URL to hadoop service protected by SPNego
+ * @throws IOException in case of errors
+ */
+ public static void main(String[] args) throws IOException {
+ new AppCookieManager().getAppCookie(args[0], false);
+ }
+
+ public AppCookieManager() {
+ }
+
+ /**
+ * Returns hadoop.auth cookie, doing needed SPNego authentication
+ *
+ * @param endpoint
+ * the URL of the Hadoop service
+ * @param refresh
+ * flag indicating whether to refresh the cookie, if
+ * <code>true</code>, we do a new SPNego authentication and refresh
+ * the cookie even if the cookie already exists in local cache
+ * @return hadoop.auth cookie value
+ * @throws IOException
+ * in case of problem getting hadoop.auth cookie
+ */
+ public String getAppCookie(String endpoint, boolean refresh)
+ throws IOException {
+
+ HttpUriRequest outboundRequest = new HttpGet(endpoint);
+ URI uri = outboundRequest.getURI();
+ String scheme = uri.getScheme();
+ String host = uri.getHost();
+ int port = uri.getPort();
+ String path = uri.getPath();
+ if (!refresh) {
+ String appCookie = endpointCookieMap.get(endpoint);
+ if (appCookie != null) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("got cached cookie");
+ }
+ return appCookie;
+ }
+ }
+
+ clearAppCookie(endpoint);
+
+ DefaultHttpClient client = new DefaultHttpClient();
+ SPNegoSchemeFactory spNegoSF = new SPNegoSchemeFactory(/* stripPort */true);
+ client.getAuthSchemes().register(AuthPolicy.SPNEGO, spNegoSF);
+ client.getCredentialsProvider().setCredentials(
+ new AuthScope(/* host */null, /* port */-1, /* realm */null),
+ EMPTY_JAAS_CREDENTIALS);
+
+ String hadoopAuthCookie = null;
+ HttpResponse httpResponse = null;
+ try {
+ HttpHost httpHost = new HttpHost(host, port, scheme);
+ HttpRequest httpRequest = new HttpOptions(path);
+ httpResponse = client.execute(httpHost, httpRequest);
+ Header[] headers = httpResponse.getHeaders(SET_COOKIE);
+ if (LOG.isDebugEnabled()) {
+ for (Header header : headers) {
+ LOG.debug(header.getName() + " : " + header.getValue());
+ }
+ }
+ hadoopAuthCookie = getHadoopAuthCookieValue(headers);
+ if (hadoopAuthCookie == null) {
+ int statusCode = httpResponse.getStatusLine().getStatusCode();
+ HttpEntity entity = httpResponse.getEntity();
+ String responseBody = entity != null ? EntityUtils.toString(entity) : null;
+ LOG.error("SPNego authentication failed with statusCode = " + statusCode + ", responseBody = " + responseBody + ", can not get hadoop.auth cookie for URL: " + endpoint);
+ return null;
+ }
+ } finally {
+ if (httpResponse != null) {
+ HttpEntity entity = httpResponse.getEntity();
+ if (entity != null) {
+ entity.getContent().close();
+ }
+ }
+
+ }
+
+ hadoopAuthCookie = HADOOP_AUTH_EQ + quote(hadoopAuthCookie);
+ setAppCookie(endpoint, hadoopAuthCookie);
+ if (LOG.isInfoEnabled()) {
+ LOG.info("Successful SPNego authentication to URL:" + uri.toString());
+ }
+ return hadoopAuthCookie;
+ }
+
+
+ /**
+ * Returns the cached app cookie
+ * @param endpoint the hadoop end point we authenticate to
+ * @return the cached app cookie, can be null
+ */
+ public String getCachedAppCookie(String endpoint) {
+ return endpointCookieMap.get(endpoint);
+ }
+
+ /**
+ * Sets the cached app cookie cache
+ * @param endpoint the hadoop end point we authenticate to
+ * @param appCookie the app cookie
+ */
+ private void setAppCookie(String endpoint, String appCookie) {
+ endpointCookieMap.put(endpoint, appCookie);
+ }
+
+ /**
+ * Clears the cached app cookie
+ * @param endpoint the hadoop end point we authenticate to
+ */
+ private void clearAppCookie(String endpoint) {
+ endpointCookieMap.remove(endpoint);
+ }
+
+ static String quote(String s) {
+ return s == null ? s : "\"" + s + "\"";
+ }
+
+ static String getHadoopAuthCookieValue(Header[] headers) {
+ if (headers == null) {
+ return null;
+ }
+ for (Header header : headers) {
+ HeaderElement[] elements = header.getElements();
+ for (HeaderElement element : elements) {
+ String cookieName = element.getName();
+ if (cookieName.equals(HADOOP_AUTH)) {
+ if (element.getValue() != null) {
+ String trimmedVal = element.getValue().trim();
+ if (!trimmedVal.isEmpty()) {
+ return trimmedVal;
+ }
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+
+ private static class EmptyJaasCredentials implements Credentials {
+
+ public String getPassword() {
+ return null;
+ }
+
+ public Principal getUserPrincipal() {
+ return null;
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java
new file mode 100644
index 0000000..8355288
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-common/src/test/java/org/apache/hadoop/metrics2/sink/timeline/AppCookieManagerTest.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.metrics2.sink.timeline;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import org.apache.http.Header;
+import org.apache.http.message.BasicHeader;
+import org.junit.Test;
+
+public class AppCookieManagerTest {
+
+ @Test
+ public void getCachedAppCookie() {
+ assertNull(new AppCookieManager().getCachedAppCookie("http://dummy"));
+ }
+
+ @Test
+ public void getHadoopAuthCookieValueWithNullHeaders() {
+ assertNull(AppCookieManager.getHadoopAuthCookieValue(null));
+ }
+
+ @Test
+ public void getHadoopAuthCookieValueWitEmptylHeaders() {
+ assertNull(AppCookieManager.getHadoopAuthCookieValue(new Header[0]));
+ }
+
+ @Test
+ public void getHadoopAuthCookieValueWithValidlHeaders() {
+ Header[] headers = new Header[1];
+ headers[0] = new BasicHeader("Set-Cookie", AppCookieManager.HADOOP_AUTH + "=dummyvalue");
+ assertNotNull(AppCookieManager.getHadoopAuthCookieValue(headers));
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
index db36db8..9c4fc02 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/configuration/ams-hbase-env.xml
@@ -255,8 +255,8 @@ export HBASE_MANAGES_ZK=false
{% if security_enabled %}
export HBASE_OPTS="$HBASE_OPTS -Djava.security.auth.login.config={{client_jaas_config_file}}"
-export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}}"
-export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}}"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Djava.security.auth.login.config={{master_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Djava.security.auth.login.config={{regionserver_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false"
export HBASE_ZOOKEEPER_OPTS="$HBASE_ZOOKEEPER_OPTS -Djava.security.auth.login.config={{ams_zookeeper_jaas_config_file}}"
{% endif %}
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_master_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_master_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_master_jaas.conf.j2
index a93c36c..4bb0fc1 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_master_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_master_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
keyTab="{{master_keytab_path}}"
principal="{{master_jaas_princ}}";
};
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{master_keytab_path}}"
+principal="{{master_jaas_princ}}";
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_regionserver_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_regionserver_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_regionserver_jaas.conf.j2
index 7097481..c9973ca 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_regionserver_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/hbase_regionserver_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
keyTab="{{regionserver_keytab_path}}"
principal="{{regionserver_jaas_princ}}";
};
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{regionserver_keytab_path}}"
+principal="{{regionserver_jaas_princ}}";
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_master_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_master_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_master_jaas.conf.j2
index a93c36c..4bb0fc1 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_master_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_master_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
keyTab="{{master_keytab_path}}"
principal="{{master_jaas_princ}}";
};
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{master_keytab_path}}"
+principal="{{master_jaas_princ}}";
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_regionserver_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_regionserver_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_regionserver_jaas.conf.j2
index 7097481..c9973ca 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_regionserver_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/templates/hbase_regionserver_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
keyTab="{{regionserver_keytab_path}}"
principal="{{regionserver_jaas_princ}}";
};
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{regionserver_keytab_path}}"
+principal="{{regionserver_jaas_princ}}";
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/configuration/hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/configuration/hbase-env.xml b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/configuration/hbase-env.xml
index da12706..cb30b63 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/configuration/hbase-env.xml
+++ b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/configuration/hbase-env.xml
@@ -225,8 +225,8 @@ JDK_DEPENDED_OPTS="-XX:PermSize=128m -XX:MaxPermSize=128m"
{% if security_enabled %}
export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.security.auth.login.config={{client_jaas_config_file}} -Djava.io.tmpdir={{java_io_tmpdir}}"
-export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}} $JDK_DEPENDED_OPTS"
-export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}} $JDK_DEPENDED_OPTS"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false $JDK_DEPENDED_OPTS"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false $JDK_DEPENDED_OPTS"
export PHOENIX_QUERYSERVER_OPTS="$PHOENIX_QUERYSERVER_OPTS -Djava.security.auth.login.config={{queryserver_jaas_config_file}}"
{% else %}
export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.io.tmpdir={{java_io_tmpdir}}"
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_master_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_master_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_master_jaas.conf.j2
index a93c36c..4bb0fc1 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_master_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_master_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
keyTab="{{master_keytab_path}}"
principal="{{master_jaas_princ}}";
};
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{master_keytab_path}}"
+principal="{{master_jaas_princ}}";
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_regionserver_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_regionserver_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_regionserver_jaas.conf.j2
index 7097481..c9973ca 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_regionserver_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/templates/hbase_regionserver_jaas.conf.j2
@@ -24,3 +24,13 @@ useTicketCache=false
keyTab="{{regionserver_keytab_path}}"
principal="{{regionserver_jaas_princ}}";
};
+com.sun.security.jgss.krb5.initiate {
+com.sun.security.auth.module.Krb5LoginModule required
+renewTGT=false
+doNotPrompt=true
+useKeyTab=true
+storeKey=true
+useTicketCache=false
+keyTab="{{regionserver_keytab_path}}"
+principal="{{regionserver_jaas_princ}}";
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
index d9b62e2..15fda67 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
@@ -51,6 +51,23 @@ def hdfs(name=None):
)
if params.security_enabled:
+ File(os.path.join(params.hadoop_conf_dir, 'hdfs_dn_jaas.conf'),
+ owner=params.hdfs_user,
+ group=params.user_group,
+ content=Template("hdfs_dn_jaas.conf.j2")
+ )
+ File(os.path.join(params.hadoop_conf_dir, 'hdfs_nn_jaas.conf'),
+ owner=params.hdfs_user,
+ group=params.user_group,
+ content=Template("hdfs_nn_jaas.conf.j2")
+ )
+ if params.dfs_ha_enabled:
+ File(os.path.join(params.hadoop_conf_dir, 'hdfs_jn_jaas.conf'),
+ owner=params.hdfs_user,
+ group=params.user_group,
+ content=Template("hdfs_jn_jaas.conf.j2")
+ )
+
tc_mode = 0644
tc_owner = "root"
else:
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_dn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_dn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_dn_jaas.conf.j2
new file mode 100644
index 0000000..53583b4
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_dn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{dn_keytab}}"
+ principal="{{dn_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_jn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_jn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_jn_jaas.conf.j2
new file mode 100644
index 0000000..9769a6b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_jn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{jn_keytab}}"
+ principal="{{jn_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_nn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_nn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_nn_jaas.conf.j2
new file mode 100644
index 0000000..985a477
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/templates/hdfs_nn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{nn_keytab}}"
+ principal="{{nn_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
index d9b62e2..15fda67 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
@@ -51,6 +51,23 @@ def hdfs(name=None):
)
if params.security_enabled:
+ File(os.path.join(params.hadoop_conf_dir, 'hdfs_dn_jaas.conf'),
+ owner=params.hdfs_user,
+ group=params.user_group,
+ content=Template("hdfs_dn_jaas.conf.j2")
+ )
+ File(os.path.join(params.hadoop_conf_dir, 'hdfs_nn_jaas.conf'),
+ owner=params.hdfs_user,
+ group=params.user_group,
+ content=Template("hdfs_nn_jaas.conf.j2")
+ )
+ if params.dfs_ha_enabled:
+ File(os.path.join(params.hadoop_conf_dir, 'hdfs_jn_jaas.conf'),
+ owner=params.hdfs_user,
+ group=params.user_group,
+ content=Template("hdfs_jn_jaas.conf.j2")
+ )
+
tc_mode = 0644
tc_owner = "root"
else:
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_dn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_dn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_dn_jaas.conf.j2
new file mode 100644
index 0000000..53583b4
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_dn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{dn_keytab}}"
+ principal="{{dn_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_jn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_jn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_jn_jaas.conf.j2
new file mode 100644
index 0000000..9769a6b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_jn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{jn_keytab}}"
+ principal="{{jn_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_nn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_nn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_nn_jaas.conf.j2
new file mode 100644
index 0000000..985a477
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/templates/hdfs_nn_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{nn_keytab}}"
+ principal="{{nn_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-env.xml b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-env.xml
index 91af58e..ad81d66 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-env.xml
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka-env.xml
@@ -88,7 +88,11 @@ export JAVA_HOME={{java64_home}}
export PATH=$PATH:$JAVA_HOME/bin
export PID_DIR={{kafka_pid_dir}}
export LOG_DIR={{kafka_log_dir}}
+{% if security_enabled %}
+export KAFKA_KERBEROS_PARAMS="-Djavax.security.auth.useSubjectCredsOnly=false {{kafka_kerberos_params}}"
+{% else %}
export KAFKA_KERBEROS_PARAMS={{kafka_kerberos_params}}
+{% endif %}
# Add kafka sink to classpath and related depenencies
if [ -e "/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar" ]; then
export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka_jaas_conf.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka_jaas_conf.xml b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka_jaas_conf.xml
index fdde8f2..8ceb891 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka_jaas_conf.xml
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/configuration/kafka_jaas_conf.xml
@@ -49,6 +49,17 @@ useTicketCache=false
serviceName="zookeeper"
principal="{{kafka_jaas_principal}}";
};
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{kafka_keytab_path}}"
+ storeKey=true
+ useTicketCache=false
+ serviceName="{{kafka_bare_jaas_principal}}"
+ principal="{{kafka_jaas_principal}}";
+};
</value>
<value-attributes>
<type>content</type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/kafka_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/kafka_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/kafka_jaas.conf.j2
index 56c558d..1d9e61d 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/kafka_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/templates/kafka_jaas.conf.j2
@@ -39,3 +39,14 @@ Client {
serviceName="zookeeper"
principal="{{kafka_jaas_principal}}";
};
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{kafka_keytab_path}}"
+ storeKey=true
+ useTicketCache=false
+ serviceName="{{kafka_bare_jaas_principal}}"
+ principal="{{kafka_jaas_principal}}";
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/storm_yaml_utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/storm_yaml_utils.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/storm_yaml_utils.py
index 9d78e71..557c9dc 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/storm_yaml_utils.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/storm_yaml_utils.py
@@ -27,7 +27,10 @@ from resource_management.core.resources.system import File
def replace_jaas_placeholder(name, security_enabled, conf_dir):
if name.find('_JAAS_PLACEHOLDER') > -1:
if security_enabled:
- return name.replace('_JAAS_PLACEHOLDER', '-Djava.security.auth.login.config=' + conf_dir + '/storm_jaas.conf')
+ if name.find('Nimbus_JVM') > -1:
+ return name.replace('_JAAS_PLACEHOLDER', '-Djava.security.auth.login.config=' + conf_dir + '/storm_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false')
+ else:
+ return name.replace('_JAAS_PLACEHOLDER', '-Djava.security.auth.login.config=' + conf_dir + '/storm_jaas.conf')
else:
return name.replace('_JAAS_PLACEHOLDER', '')
else:
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/storm_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/storm_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/storm_jaas.conf.j2
index c22cb51..d131e62 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/storm_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/templates/storm_jaas.conf.j2
@@ -41,6 +41,16 @@ RegistryClient {
useTicketCache=false
principal="{{storm_jaas_principal}}";
};
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{nimbus_keytab_path}}"
+ principal="{{nimbus_jaas_principal}}"
+ storeKey=true
+ useTicketCache=false;
+};
{% endif %}
Client {
com.sun.security.auth.module.Krb5LoginModule required
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 3579fcb..f474a89 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -249,6 +249,9 @@ nm_hosts = default("/clusterHostInfo/nm_hosts", [])
# don't using len(nm_hosts) here, because check can take too much time on large clusters
number_of_nm = 1
+hs_host = default("/clusterHostInfo/hs_host", [])
+has_hs = not len(hs_host) == 0
+
# default kinit commands
rm_kinit_cmd = ""
yarn_timelineservice_kinit_cmd = ""
@@ -272,19 +275,26 @@ if security_enabled:
# YARN timeline security options
if has_ats:
- _yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
- _yarn_timelineservice_principal_name = _yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
- _yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
- yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {_yarn_timelineservice_keytab} {_yarn_timelineservice_principal_name};")
+ yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
+ yarn_timelineservice_principal_name = yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
+ yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
+ yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {yarn_timelineservice_keytab} {yarn_timelineservice_principal_name};")
+ yarn_ats_jaas_file = os.path.join(config_dir, 'yarn_ats_jaas.conf')
if 'yarn.nodemanager.principal' in config['configurations']['yarn-site']:
- _nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
- if _nodemanager_principal_name:
- _nodemanager_principal_name = _nodemanager_principal_name.replace('_HOST', hostname.lower())
-
- _nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
- nodemanager_kinit_cmd = format("{kinit_path_local} -kt {_nodemanager_keytab} {_nodemanager_principal_name};")
-
+ nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
+ if nodemanager_principal_name:
+ nodemanager_principal_name = nodemanager_principal_name.replace('_HOST', hostname.lower())
+
+ nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
+ nodemanager_kinit_cmd = format("{kinit_path_local} -kt {nodemanager_keytab} {nodemanager_principal_name};")
+ yarn_nm_jaas_file = os.path.join(config_dir, 'yarn_nm_jaas.conf')
+
+ if has_hs:
+ mapred_jhs_principal_name = config['configurations']['mapred-site']['mapreduce.jobhistory.principal']
+ mapred_jhs_principal_name = mapred_jhs_principal_name.replace('_HOST', hostname.lower())
+ mapred_jhs_keytab = config['configurations']['mapred-site']['mapreduce.jobhistory.keytab']
+ mapred_jaas_file = os.path.join(config_dir, 'mapred_jaas.conf')
yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']
yarn_nm_app_log_dir = config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
index 5ef08ad..28d14fe 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
@@ -192,6 +192,23 @@ def yarn(name=None, config_dir=None):
group=params.user_group,
content=Template("yarn_jaas.conf.j2")
)
+ if params.has_ats:
+ File(os.path.join(config_dir, 'yarn_ats_jaas.conf'),
+ owner=params.yarn_user,
+ group=params.user_group,
+ content=Template("yarn_ats_jaas.conf.j2")
+ )
+ File(os.path.join(config_dir, 'yarn_nm_jaas.conf'),
+ owner=params.yarn_user,
+ group=params.user_group,
+ content=Template("yarn_nm_jaas.conf.j2")
+ )
+ if params.has_hs:
+ File(os.path.join(config_dir, 'mapred_jaas.conf'),
+ owner=params.mapred_user,
+ group=params.user_group,
+ content=Template("mapred_jaas.conf.j2")
+ )
else:
File(os.path.join(config_dir, 'taskcontroller.cfg'),
owner=params.tc_owner,
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapred_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapred_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapred_jaas.conf.j2
new file mode 100644
index 0000000..67f4bcb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapred_jaas.conf.j2
@@ -0,0 +1,28 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{mapred_jhs_keytab}}"
+ principal="{{mapred_jhs_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_ats_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_ats_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_ats_jaas.conf.j2
new file mode 100644
index 0000000..55308e8
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_ats_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{yarn_timelineservice_keytab}}"
+ principal="{{yarn_timelineservice_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_jaas.conf.j2
index 483c815..99f0a1b 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_jaas.conf.j2
@@ -23,4 +23,14 @@ Client {
useTicketCache=false
keyTab="{{rm_keytab}}"
principal="{{rm_principal_name}}";
-};
\ No newline at end of file
+};
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{rm_keytab}}"
+ principal="{{rm_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_nm_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_nm_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_nm_jaas.conf.j2
new file mode 100644
index 0000000..b501c82
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/yarn_nm_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{nodemanager_keytab}}"
+ principal="{{nodemanager_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration-mapred/mapred-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration-mapred/mapred-env.xml b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration-mapred/mapred-env.xml
index 07cfafe..93e5234 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration-mapred/mapred-env.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration-mapred/mapred-env.xml
@@ -89,7 +89,9 @@
export HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA
- #export HADOOP_JOB_HISTORYSERVER_OPTS=
+ {% if security_enabled %}
+ export HADOOP_JOB_HISTORYSERVER_OPTS="-Djava.security.auth.login.config={{mapred_jaas_file}} -Djavax.security.auth.useSubjectCredsOnly=false"
+ {% endif %}
#export HADOOP_MAPRED_LOG_DIR="" # Where log files are stored. $HADOOP_MAPRED_HOME/logs by default.
#export HADOOP_JHS_LOGGER=INFO,RFA # Hadoop JobSummary logger.
#export HADOOP_MAPRED_PID_DIR= # The pid files are stored. /tmp by default.
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-env.xml b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-env.xml
index 6a52865..aaa72d1 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-env.xml
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/configuration/yarn-env.xml
@@ -220,7 +220,9 @@ export YARN_RESOURCEMANAGER_HEAPSIZE={{resourcemanager_heapsize}}
# Specify the JVM options to be used when starting the ResourceManager.
# These options will be appended to the options specified as YARN_OPTS
# and therefore may override any similar flags set in YARN_OPTS
-#export YARN_RESOURCEMANAGER_OPTS=
+{% if security_enabled %}
+export YARN_RESOURCEMANAGER_OPTS="-Djava.security.auth.login.config={{yarn_jaas_file}}"
+{% endif %}
# Node Manager specific parameters
@@ -242,10 +244,16 @@ export YARN_NODEMANAGER_HEAPSIZE={{nodemanager_heapsize}}
# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
export YARN_TIMELINESERVER_HEAPSIZE={{apptimelineserver_heapsize}}
+{% if security_enabled %}
+export YARN_TIMELINESERVER_OPTS="-Djava.security.auth.login.config={{yarn_ats_jaas_file}}"
+{% endif %}
+
# Specify the JVM options to be used when starting the NodeManager.
# These options will be appended to the options specified as YARN_OPTS
# and therefore may override any similar flags set in YARN_OPTS
-#export YARN_NODEMANAGER_OPTS=
+{% if security_enabled %}
+export YARN_NODEMANAGER_OPTS="-Djava.security.auth.login.config={{yarn_nm_jaas_file}}"
+{% endif %}
# so that filenames w/ spaces are handled correctly in loops below
IFS=
@@ -286,6 +294,9 @@ YARN_OPTS="$YARN_OPTS -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"
{% if rm_security_opts is defined %}
YARN_OPTS="{{rm_security_opts}} $YARN_OPTS"
{% endif %}
+{% if security_enabled %}
+YARN_OPTS="$YARN_OPTS -Djavax.security.auth.useSubjectCredsOnly=false"
+{% endif %}
</value>
<value-attributes>
<type>content</type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
index 66194ed..a05d259 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/params_linux.py
@@ -247,6 +247,9 @@ nm_hosts = default("/clusterHostInfo/nm_hosts", [])
# don't using len(nm_hosts) here, because check can take too much time on large clusters
number_of_nm = 1
+hs_host = default("/clusterHostInfo/hs_host", [])
+has_hs = not len(hs_host) == 0
+
# default kinit commands
rm_kinit_cmd = ""
yarn_timelineservice_kinit_cmd = ""
@@ -268,19 +271,26 @@ if security_enabled:
# YARN timeline security options
if has_ats:
- _yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
- _yarn_timelineservice_principal_name = _yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
- _yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
- yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {_yarn_timelineservice_keytab} {_yarn_timelineservice_principal_name};")
+ yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
+ yarn_timelineservice_principal_name = yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
+ yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
+ yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {yarn_timelineservice_keytab} {yarn_timelineservice_principal_name};")
+ yarn_ats_jaas_file = os.path.join(config_dir, 'yarn_ats_jaas.conf')
if 'yarn.nodemanager.principal' in config['configurations']['yarn-site']:
- _nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
- if _nodemanager_principal_name:
- _nodemanager_principal_name = _nodemanager_principal_name.replace('_HOST', hostname.lower())
-
- _nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
- nodemanager_kinit_cmd = format("{kinit_path_local} -kt {_nodemanager_keytab} {_nodemanager_principal_name};")
-
+ nodemanager_principal_name = default('/configurations/yarn-site/yarn.nodemanager.principal', None)
+ if nodemanager_principal_name:
+ nodemanager_principal_name = nodemanager_principal_name.replace('_HOST', hostname.lower())
+
+ nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
+ nodemanager_kinit_cmd = format("{kinit_path_local} -kt {nodemanager_keytab} {nodemanager_principal_name};")
+ yarn_nm_jaas_file = os.path.join(config_dir, 'yarn_nm_jaas.conf')
+
+ if has_hs:
+ mapred_jhs_principal_name = config['configurations']['mapred-site']['mapreduce.jobhistory.principal']
+ mapred_jhs_principal_name = mapred_jhs_principal_name.replace('_HOST', hostname.lower())
+ mapred_jhs_keytab = config['configurations']['mapred-site']['mapreduce.jobhistory.keytab']
+ mapred_jaas_file = os.path.join(config_dir, 'mapred_jaas.conf')
yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']
yarn_nm_app_log_dir = config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py
index 768411c..0591511 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/scripts/yarn.py
@@ -192,7 +192,24 @@ def yarn(name=None, config_dir=None):
owner=params.yarn_user,
group=params.user_group,
content=Template("yarn_jaas.conf.j2")
- )
+ )
+ if params.has_ats:
+ File(os.path.join(config_dir, 'yarn_ats_jaas.conf'),
+ owner=params.yarn_user,
+ group=params.user_group,
+ content=Template("yarn_ats_jaas.conf.j2")
+ )
+ File(os.path.join(config_dir, 'yarn_nm_jaas.conf'),
+ owner=params.yarn_user,
+ group=params.user_group,
+ content=Template("yarn_nm_jaas.conf.j2")
+ )
+ if params.has_hs:
+ File(os.path.join(config_dir, 'mapred_jaas.conf'),
+ owner=params.mapred_user,
+ group=params.user_group,
+ content=Template("mapred_jaas.conf.j2")
+ )
else:
File(os.path.join(config_dir, 'taskcontroller.cfg'),
owner=params.tc_owner,
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/mapred_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/mapred_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/mapred_jaas.conf.j2
new file mode 100644
index 0000000..67f4bcb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/mapred_jaas.conf.j2
@@ -0,0 +1,28 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{mapred_jhs_keytab}}"
+ principal="{{mapred_jhs_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_ats_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_ats_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_ats_jaas.conf.j2
new file mode 100644
index 0000000..55308e8
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_ats_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{yarn_timelineservice_keytab}}"
+ principal="{{yarn_timelineservice_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_jaas.conf.j2
index 483c815..99f0a1b 100644
--- a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_jaas.conf.j2
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_jaas.conf.j2
@@ -23,4 +23,14 @@ Client {
useTicketCache=false
keyTab="{{rm_keytab}}"
principal="{{rm_principal_name}}";
-};
\ No newline at end of file
+};
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{rm_keytab}}"
+ principal="{{rm_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_nm_jaas.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_nm_jaas.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_nm_jaas.conf.j2
new file mode 100644
index 0000000..b501c82
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/3.0.0.3.0/package/templates/yarn_nm_jaas.conf.j2
@@ -0,0 +1,27 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule required
+ renewTGT=false
+ doNotPrompt=true
+ useKeyTab=true
+ keyTab="{{nodemanager_keytab}}"
+ principal="{{nodemanager_principal_name}}"
+ storeKey=true
+ useTicketCache=false;
+};
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-env.xml
index 869f44a..67d33db 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/YARN/configuration-mapred/mapred-env.xml
@@ -32,7 +32,9 @@ export HADOOP_JOB_HISTORYSERVER_HEAPSIZE={{jobhistory_heapsize}}
export HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA
-#export HADOOP_JOB_HISTORYSERVER_OPTS=
+{% if security_enabled %}
+export HADOOP_JOB_HISTORYSERVER_OPTS="-Djava.security.auth.login.config={{mapred_jaas_file}} -Djavax.security.auth.useSubjectCredsOnly=false"
+{% endif %}
#export HADOOP_MAPRED_LOG_DIR="" # Where log files are stored. $HADOOP_MAPRED_HOME/logs by default.
#export HADOOP_JHS_LOGGER=INFO,RFA # Hadoop JobSummary logger.
#export HADOOP_MAPRED_PID_DIR= # The pid files are stored. /tmp by default.
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/configuration/hbase-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/configuration/hbase-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/configuration/hbase-env.xml
index d2b3671..45e137c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/configuration/hbase-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HBASE/configuration/hbase-env.xml
@@ -90,8 +90,8 @@ JDK_DEPENDED_OPTS="-XX:PermSize=128m -XX:MaxPermSize=128m"
{% if security_enabled %}
export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.security.auth.login.config={{client_jaas_config_file}} -Djava.io.tmpdir={{java_io_tmpdir}}"
-export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}} $JDK_DEPENDED_OPTS"
-export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}} $JDK_DEPENDED_OPTS"
+export HBASE_MASTER_OPTS="$HBASE_MASTER_OPTS -Xmx{{master_heapsize}} -Djava.security.auth.login.config={{master_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false $JDK_DEPENDED_OPTS"
+export HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -Xmn{{regionserver_xmn_size}} -XX:CMSInitiatingOccupancyFraction=70 -Xms{{regionserver_heapsize}} -Xmx{{regionserver_heapsize}} -Djava.security.auth.login.config={{regionserver_jaas_config_file}} -Djavax.security.auth.useSubjectCredsOnly=false $JDK_DEPENDED_OPTS"
export PHOENIX_QUERYSERVER_OPTS="$PHOENIX_QUERYSERVER_OPTS -Djava.security.auth.login.config={{queryserver_jaas_config_file}}"
{% else %}
export HBASE_OPTS="$HBASE_OPTS -XX:+UseConcMarkSweepGC -XX:ErrorFile={{log_dir}}/hs_err_pid%p.log -Djava.io.tmpdir={{java_io_tmpdir}}"
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
index 1bfd2fe..eb04aa4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hadoop-env.xml
@@ -81,6 +81,13 @@ export HADOOP_SECONDARYNAMENODE_OPTS="${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOf
export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
{% endif %}
+{% if security_enabled %}
+export HADOOP_NAMENODE_OPTS="$HADOOP_NAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_SECONDARYNAMENODE_OPTS="$HADOOP_SECONDARYNAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_DATANODE_OPTS="$HADOOP_DATANODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_dn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_JOURNALNODE_OPTS="$HADOOP_JOURNALNODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_jn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+{% endif %}
+
HADOOP_NFS3_OPTS="-Xmx{{nfsgateway_heapsize}}m -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_NFS3_OPTS}"
HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
index 190684c..9bfa2fe 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/YARN/configuration/yarn-env.xml
@@ -90,8 +90,9 @@
# Specify the JVM options to be used when starting the ResourceManager.
# These options will be appended to the options specified as YARN_OPTS
# and therefore may override any similar flags set in YARN_OPTS
- #export YARN_RESOURCEMANAGER_OPTS=
-
+ {% if security_enabled %}
+ export YARN_RESOURCEMANAGER_OPTS="-Djava.security.auth.login.config={{yarn_jaas_file}}"
+ {% endif %}
# Node Manager specific parameters
# Specify the max Heapsize for the NodeManager using a numerical value
@@ -112,10 +113,16 @@
# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
export YARN_TIMELINESERVER_HEAPSIZE={{apptimelineserver_heapsize}}
+ {% if security_enabled %}
+ export YARN_TIMELINESERVER_OPTS="-Djava.security.auth.login.config={{yarn_ats_jaas_file}}"
+ {% endif %}
+
# Specify the JVM options to be used when starting the NodeManager.
# These options will be appended to the options specified as YARN_OPTS
# and therefore may override any similar flags set in YARN_OPTS
- #export YARN_NODEMANAGER_OPTS=
+ {% if security_enabled %}
+ export YARN_NODEMANAGER_OPTS="-Djava.security.auth.login.config={{yarn_nm_jaas_file}}"
+ {% endif %}
# so that filenames w/ spaces are handled correctly in loops below
IFS=
@@ -153,6 +160,9 @@
fi
YARN_OPTS="$YARN_OPTS -Dyarn.policy.file=$YARN_POLICYFILE"
YARN_OPTS="$YARN_OPTS -Djava.io.tmpdir={{hadoop_java_io_tmpdir}}"
+ {% if security_enabled %}
+ YARN_OPTS="$YARN_OPTS -Djavax.security.auth.useSubjectCredsOnly=false"
+ {% endif %}
</value>
<value-attributes>
<type>content</type>
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
index 1bfd2fe..eb04aa4 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.4/services/HDFS/configuration/hadoop-env.xml
@@ -81,6 +81,13 @@ export HADOOP_SECONDARYNAMENODE_OPTS="${SHARED_HADOOP_NAMENODE_OPTS} -XX:OnOutOf
export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
{% endif %}
+{% if security_enabled %}
+export HADOOP_NAMENODE_OPTS="$HADOOP_NAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_SECONDARYNAMENODE_OPTS="$HADOOP_SECONDARYNAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_DATANODE_OPTS="$HADOOP_DATANODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_dn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+export HADOOP_JOURNALNODE_OPTS="$HADOOP_JOURNALNODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_jn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+{% endif %}
+
HADOOP_NFS3_OPTS="-Xmx{{nfsgateway_heapsize}}m -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_NFS3_OPTS}"
HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
index 9d504db..4814efe 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/HDFS/configuration/hadoop-env.xml
@@ -82,6 +82,13 @@
export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
{% endif %}
+ {% if security_enabled %}
+ export HADOOP_NAMENODE_OPTS="$HADOOP_NAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+ export HADOOP_SECONDARYNAMENODE_OPTS="$HADOOP_SECONDARYNAMENODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_nn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+ export HADOOP_DATANODE_OPTS="$HADOOP_DATANODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_dn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+ export HADOOP_JOURNALNODE_OPTS="$HADOOP_JOURNALNODE_OPTS -Djava.security.auth.login.config={{hadoop_conf_dir}}/hdfs_jn_jaas.conf -Djavax.security.auth.useSubjectCredsOnly=false"
+ {% endif %}
+
HADOOP_NFS3_OPTS="-Xmx{{nfsgateway_heapsize}}m -Dhadoop.security.logger=ERROR,DRFAS ${HADOOP_NFS3_OPTS}"
HADOOP_BALANCER_OPTS="-server -Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}"
http://git-wip-us.apache.org/repos/asf/ambari/blob/4aaf259e/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-env.xml b/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-env.xml
index a143660..b044cb6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/services/YARN/configuration-mapred/mapred-env.xml
@@ -31,7 +31,9 @@
export HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA
- #export HADOOP_JOB_HISTORYSERVER_OPTS=
+ {% if security_enabled %}
+ export HADOOP_JOB_HISTORYSERVER_OPTS="-Djava.security.auth.login.config={{mapred_jaas_file}} -Djavax.security.auth.useSubjectCredsOnly=false"
+ {% endif %}
#export HADOOP_MAPRED_LOG_DIR="" # Where log files are stored. $HADOOP_MAPRED_HOME/logs by default.
#export HADOOP_JHS_LOGGER=INFO,RFA # Hadoop JobSummary logger.
#export HADOOP_MAPRED_PID_DIR= # The pid files are stored. /tmp by default.
[3/9] ambari git commit: Updated team page. (yusaku)
Posted by rl...@apache.org.
Updated team page. (yusaku)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/eb7fbbdc
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/eb7fbbdc
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/eb7fbbdc
Branch: refs/heads/branch-feature-AMBARI-20859
Commit: eb7fbbdc6ecdc6a0b1af26ddfd66d4b30d528a4f
Parents: 4aaf259
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Thu Jun 8 15:22:11 2017 -0700
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Thu Jun 8 15:22:11 2017 -0700
----------------------------------------------------------------------
docs/pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/eb7fbbdc/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index a9c48d6..95478e4 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -239,7 +239,7 @@
<email>avijayan@apache.org</email>
<timezone>-8</timezone>
<roles>
- <role>Committer</role>
+ <role>PMC</role>
</roles>
<organization>
Hortonworks
[7/9] ambari git commit: AMBARI-21128 Add AMS HA support to local
metrics aggregator application (dsen)
Posted by rl...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/29f75089/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/controller.py
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/controller.py b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/controller.py
index e5da9ba..d161269 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/controller.py
+++ b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/controller.py
@@ -138,7 +138,7 @@ class Controller(threading.Thread):
if self.aggregator:
self.aggregator.stop()
if self.aggregator_watchdog:
- self.aggregator.stop()
+ self.aggregator_watchdog.stop()
self.aggregator = Aggregator(self.config, self._stop_handler)
self.aggregator_watchdog = AggregatorWatchdog(self.config, self._stop_handler)
self.aggregator.start()
[5/9] ambari git commit: AMBARI-20929: Changes in Zeppelin JDBC
config after ZEPPELIN-2367 (Prabhjyot Singh via nitirajrathore)
Posted by rl...@apache.org.
AMBARI-20929: Changes in Zeppelin JDBC config after ZEPPELIN-2367 (Prabhjyot Singh via nitirajrathore)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/215bd7aa
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/215bd7aa
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/215bd7aa
Branch: refs/heads/branch-feature-AMBARI-20859
Commit: 215bd7aaa3cdefe1bdc9242c4fa8e9d0106d8740
Parents: 57bb136
Author: Nitiraj Singh Rathore <ni...@gmail.com>
Authored: Fri Jun 9 14:19:38 2017 +0530
Committer: Nitiraj Singh Rathore <ni...@gmail.com>
Committed: Fri Jun 9 14:20:37 2017 +0530
----------------------------------------------------------------------
.../common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/215bd7aa/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
index c2f81639..3d516b0 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/package/scripts/master.py
@@ -302,7 +302,7 @@ class Master(Script):
params.hbase_zookeeper_quorum + ':' + \
params.zookeeper_znode_parent
else:
- interpreter['properties']['zeppelin.jdbc.auth.type'] = ""
+ interpreter['properties']['zeppelin.jdbc.auth.type'] = "SIMPLE"
interpreter['properties']['zeppelin.jdbc.principal'] = ""
interpreter['properties']['zeppelin.jdbc.keytab.location'] = ""
elif interpreter['group'] == 'sh':
@@ -354,6 +354,7 @@ class Master(Script):
interpreter['properties']['hive.driver'] = 'org.apache.hive.jdbc.HiveDriver'
interpreter['properties']['hive.user'] = 'hive'
interpreter['properties']['hive.password'] = ''
+ interpreter['properties']['hive.proxy.user.property'] = 'hive.server2.proxy.user'
if params.hive_server2_support_dynamic_service_discovery:
interpreter['properties']['hive.url'] = 'jdbc:hive2://' + \
params.hive_zookeeper_quorum + \
@@ -367,6 +368,7 @@ class Master(Script):
interpreter['properties'][hive_interactive_properties_key + '.driver'] = 'org.apache.hive.jdbc.HiveDriver'
interpreter['properties'][hive_interactive_properties_key + '.user'] = 'hive'
interpreter['properties'][hive_interactive_properties_key + '.password'] = ''
+ interpreter['properties'][hive_interactive_properties_key + '.property'] = 'hive.server2.proxy.user'
if params.hive_server2_support_dynamic_service_discovery:
interpreter['properties'][hive_interactive_properties_key + '.url'] = 'jdbc:hive2://' + \
params.hive_zookeeper_quorum + \