You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by nc...@apache.org on 2016/09/19 15:31:52 UTC
[01/11] ambari git commit: AMBARI-17518. improve resourcemanager HA
description (wang yaoxin via ncole)
Repository: ambari
Updated Branches:
refs/heads/branch-dev-patch-upgrade e2085dd6f -> 79e678653
AMBARI-17518. improve resourcemanager HA description (wang yaoxin via ncole)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/9dbf079d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/9dbf079d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/9dbf079d
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 9dbf079d957b2b449030a1efa95b77614a3f1ea2
Parents: 9dbcac6
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri Sep 16 11:04:09 2016 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Sep 16 11:04:09 2016 -0400
----------------------------------------------------------------------
ambari-web/app/models/host_component.js | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/9dbf079d/ambari-web/app/models/host_component.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/models/host_component.js b/ambari-web/app/models/host_component.js
index dca7ff0..11ea659 100644
--- a/ambari-web/app/models/host_component.js
+++ b/ambari-web/app/models/host_component.js
@@ -330,8 +330,8 @@ App.HostComponentActionMap = {
},
TOGGLE_RM_HA: {
action: 'enableRMHighAvailability',
- label: Em.I18n.t('admin.rm_highAvailability.button.enable'),
- cssClass: 'icon-arrow-up',
+ label: App.get('isRMHaEnabled') ? Em.I18n.t('admin.rm_highAvailability.button.disable') : Em.I18n.t('admin.rm_highAvailability.button.enable'),
+ cssClass: App.get('isRMHaEnabled') ? 'icon-arrow-down' : 'icon-arrow-up',
isHidden: App.get('isRMHaEnabled'),
disabled: App.get('isSingleNode') || !RM || RM.get('isNotInstalled')
},
[09/11] ambari git commit: AMBARI-18237. Certain configuration files
cannot be modified through Ambari api. (aonishuk)
Posted by nc...@apache.org.
AMBARI-18237. Certain configuration files cannot be modified through Ambari api. (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/64fc477d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/64fc477d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/64fc477d
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 64fc477df3305a5c09f6114c9b31b06dde600dc4
Parents: 335ef6e
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Sep 19 16:50:20 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Sep 19 16:50:20 2016 +0300
----------------------------------------------------------------------
.../src/test/python/stacks/2.0.6/configs/default.json | 3 +++
.../src/test/python/stacks/2.0.6/configs/secured.json | 3 +++
.../stacks/2.0.6/hooks/before-START/test_before_start.py | 8 ++++----
3 files changed, 10 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/64fc477d/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
index 24493c9..f54b645 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
@@ -939,6 +939,9 @@
},
"ams-grafana-ini": {
"content": "\n"
+ },
+ "hadoop-metrics2.properties": {
+ "content": "# Licensed to the Apache Software Foundation (ASF) under one or more\r\n# contributor license agreements. See the NOTICE file distributed with\r\n# this work for additional information regarding copyright ownership.\r\n# The ASF licenses this file to You under the Apache License, Version 2.0\r\n# (the \"License\"); you may not use this file except in compliance with\r\n# the License. You may obtain a copy of the License at\r\n#\r\n# http:\/\/www.apache.org\/licenses\/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\r\n# syntax: [prefix].[source|sink|jmx].[instance].[options]\r\n# See package.html for org.apache.hadoop.metrics2 for details\r\n\r\n{% if has_gang
lia_server %}\r\n*.period=60\r\n\r\n*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31\r\n*.sink.ganglia.period=10\r\n\r\n# default for supportsparse is false\r\n*.sink.ganglia.supportsparse=true\r\n\r\n.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both\r\n.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40\r\n\r\n# Hook up to the server\r\nnamenode.sink.ganglia.servers={{ganglia_server_host}}:8661\r\ndatanode.sink.ganglia.servers={{ganglia_server_host}}:8659\r\njobtracker.sink.ganglia.servers={{ganglia_server_host}}:8662\r\ntasktracker.sink.ganglia.servers={{ganglia_server_host}}:8658\r\nmaptask.sink.ganglia.servers={{ganglia_server_host}}:8660\r\nreducetask.sink.ganglia.servers={{ganglia_server_host}}:8660\r\nresourcemanager.sink.ganglia.servers={{ganglia_server_host}}:8664\r\nnodemanager.sink.ganglia.servers={{ganglia_server_host}}:8657\r\nhistoryserver.sink.ganglia.servers={{ganglia_server_host}}:8666\r\njo
urnalnode.sink.ganglia.servers={{ganglia_server_host}}:8654\r\nnimbus.sink.ganglia.servers={{ganglia_server_host}}:8649\r\nsupervisor.sink.ganglia.servers={{ganglia_server_host}}:8650\r\n\r\nresourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue\r\n\r\n{% endif %}\r\n\r\n{% if has_metric_collector %}\r\n\r\n*.period={{metrics_collection_period}}\r\n*.sink.timeline.plugin.urls=file:\/\/\/usr\/lib\/ambari-metrics-hadoop-sink\/ambari-metrics-hadoop-sink.jar\r\n*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink\r\n*.sink.timeline.period={{metrics_collection_period}}\r\n*.sink.timeline.sendInterval={{metrics_report_interval}}000\r\n*.sink.timeline.slave.host.name={{hostname}}\r\n*.sink.timeline.zookeeper.quorum={{zookeeper_quorum}}\r\n*.sink.timeline.protocol={{metric_collector_protocol}}\r\n*.sink.timeline.port={{metric_collector_port}}\r\n\r\n# HTTPS properties\r\n*.sink.timeline.truststore.path = {{metric_truststore_path}}\r\n*.sink.timeline.trusts
tore.type = {{metric_truststore_type}}\r\n*.sink.timeline.truststore.password = {{metric_truststore_password}}\r\n\r\ndatanode.sink.timeline.collector={{metric_collector_hosts}}\r\nnamenode.sink.timeline.collector={{metric_collector_hosts}}\r\nresourcemanager.sink.timeline.collector={{metric_collector_hosts}}\r\nnodemanager.sink.timeline.collector={{metric_collector_hosts}}\r\njobhistoryserver.sink.timeline.collector={{metric_collector_hosts}}\r\njournalnode.sink.timeline.collector={{metric_collector_hosts}}\r\nmaptask.sink.timeline.collector={{metric_collector_hosts}}\r\nreducetask.sink.timeline.collector={{metric_collector_hosts}}\r\napplicationhistoryserver.sink.timeline.collector={{metric_collector_hosts}}\r\n\r\nresourcemanager.sink.timeline.tagsForPrefix.yarn=Queue\r\n\r\n{% if is_nn_client_port_configured %}\r\n# Namenode rpc ports customization\r\nnamenode.sink.timeline.metric.rpc.client.port={{nn_rpc_client_port}}\r\n{% endif %}\r\n{% if is_nn_dn_port_configured %}\r\nnamen
ode.sink.timeline.metric.rpc.datanode.port={{nn_rpc_dn_port}}\r\n{% endif %}\r\n{% if is_nn_healthcheck_port_configured %}\r\nnamenode.sink.timeline.metric.rpc.healthcheck.port={{nn_rpc_healthcheck_port}}\r\n{% endif %}\r\n\r\n{% endif %}"
}
},
"configuration_attributes": {
http://git-wip-us.apache.org/repos/asf/ambari/blob/64fc477d/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
index 3dcf1e9..890b9f1 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
@@ -827,6 +827,9 @@
"SSL_TRUSTSTORE_FILE_PATH": "/etc/hadoop/conf/ranger-plugin-truststore.jks",
"REPOSITORY_CONFIG_PASSWORD": "hadoop",
"XAAUDIT.SOLR.MAX_QUEUE_SIZE": "1"
+ },
+ "hadoop-metrics2.properties": {
+ "content": "# Licensed to the Apache Software Foundation (ASF) under one or more\r\n# contributor license agreements. See the NOTICE file distributed with\r\n# this work for additional information regarding copyright ownership.\r\n# The ASF licenses this file to You under the Apache License, Version 2.0\r\n# (the \"License\"); you may not use this file except in compliance with\r\n# the License. You may obtain a copy of the License at\r\n#\r\n# http:\/\/www.apache.org\/licenses\/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\r\n# syntax: [prefix].[source|sink|jmx].[instance].[options]\r\n# See package.html for org.apache.hadoop.metrics2 for details\r\n\r\n{% if has_gang
lia_server %}\r\n*.period=60\r\n\r\n*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31\r\n*.sink.ganglia.period=10\r\n\r\n# default for supportsparse is false\r\n*.sink.ganglia.supportsparse=true\r\n\r\n.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both\r\n.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40\r\n\r\n# Hook up to the server\r\nnamenode.sink.ganglia.servers={{ganglia_server_host}}:8661\r\ndatanode.sink.ganglia.servers={{ganglia_server_host}}:8659\r\njobtracker.sink.ganglia.servers={{ganglia_server_host}}:8662\r\ntasktracker.sink.ganglia.servers={{ganglia_server_host}}:8658\r\nmaptask.sink.ganglia.servers={{ganglia_server_host}}:8660\r\nreducetask.sink.ganglia.servers={{ganglia_server_host}}:8660\r\nresourcemanager.sink.ganglia.servers={{ganglia_server_host}}:8664\r\nnodemanager.sink.ganglia.servers={{ganglia_server_host}}:8657\r\nhistoryserver.sink.ganglia.servers={{ganglia_server_host}}:8666\r\njo
urnalnode.sink.ganglia.servers={{ganglia_server_host}}:8654\r\nnimbus.sink.ganglia.servers={{ganglia_server_host}}:8649\r\nsupervisor.sink.ganglia.servers={{ganglia_server_host}}:8650\r\n\r\nresourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue\r\n\r\n{% endif %}\r\n\r\n{% if has_metric_collector %}\r\n\r\n*.period={{metrics_collection_period}}\r\n*.sink.timeline.plugin.urls=file:\/\/\/usr\/lib\/ambari-metrics-hadoop-sink\/ambari-metrics-hadoop-sink.jar\r\n*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink\r\n*.sink.timeline.period={{metrics_collection_period}}\r\n*.sink.timeline.sendInterval={{metrics_report_interval}}000\r\n*.sink.timeline.slave.host.name={{hostname}}\r\n*.sink.timeline.zookeeper.quorum={{zookeeper_quorum}}\r\n*.sink.timeline.protocol={{metric_collector_protocol}}\r\n*.sink.timeline.port={{metric_collector_port}}\r\n\r\n# HTTPS properties\r\n*.sink.timeline.truststore.path = {{metric_truststore_path}}\r\n*.sink.timeline.trusts
tore.type = {{metric_truststore_type}}\r\n*.sink.timeline.truststore.password = {{metric_truststore_password}}\r\n\r\ndatanode.sink.timeline.collector={{metric_collector_hosts}}\r\nnamenode.sink.timeline.collector={{metric_collector_hosts}}\r\nresourcemanager.sink.timeline.collector={{metric_collector_hosts}}\r\nnodemanager.sink.timeline.collector={{metric_collector_hosts}}\r\njobhistoryserver.sink.timeline.collector={{metric_collector_hosts}}\r\njournalnode.sink.timeline.collector={{metric_collector_hosts}}\r\nmaptask.sink.timeline.collector={{metric_collector_hosts}}\r\nreducetask.sink.timeline.collector={{metric_collector_hosts}}\r\napplicationhistoryserver.sink.timeline.collector={{metric_collector_hosts}}\r\n\r\nresourcemanager.sink.timeline.tagsForPrefix.yarn=Queue\r\n\r\n{% if is_nn_client_port_configured %}\r\n# Namenode rpc ports customization\r\nnamenode.sink.timeline.metric.rpc.client.port={{nn_rpc_client_port}}\r\n{% endif %}\r\n{% if is_nn_dn_port_configured %}\r\nnamen
ode.sink.timeline.metric.rpc.datanode.port={{nn_rpc_dn_port}}\r\n{% endif %}\r\n{% if is_nn_healthcheck_port_configured %}\r\nnamenode.sink.timeline.metric.rpc.healthcheck.port={{nn_rpc_healthcheck_port}}\r\n{% endif %}\r\n\r\n{% endif %}"
}
},
"configuration_attributes": {
http://git-wip-us.apache.org/repos/asf/ambari/blob/64fc477d/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
index c0f279f..6e5561a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
@@ -72,7 +72,7 @@ class TestHookBeforeStart(RMFTestCase):
content='log4jproperties\nline2log4jproperties\nline2'
)
self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
- content = Template('hadoop-metrics2.properties.j2'),
+ content = InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
group='hadoop',
owner = 'hdfs',
)
@@ -146,7 +146,7 @@ class TestHookBeforeStart(RMFTestCase):
content='log4jproperties\nline2log4jproperties\nline2'
)
self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
- content = Template('hadoop-metrics2.properties.j2'),
+ content = InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
group='hadoop',
owner = 'hdfs',
)
@@ -225,7 +225,7 @@ class TestHookBeforeStart(RMFTestCase):
content='log4jproperties\nline2log4jproperties\nline2'
)
self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
- content = Template('hadoop-metrics2.properties.j2'),
+ content = InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
group='hadoop',
owner = 'hdfs',
)
@@ -306,7 +306,7 @@ class TestHookBeforeStart(RMFTestCase):
content='log4jproperties\nline2log4jproperties\nline2'
)
self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
- content = Template('hadoop-metrics2.properties.j2'),
+ content = InlineTemplate(self.getConfig()['configurations']['hadoop-metrics2.properties']['content']),
group='hadoop',
owner = 'hdfs',
)
[03/11] ambari git commit: AMBARI-17456 : Support round-robin
scheduling with failover for monitors with distributed collector - Bug fix
(avijayan)
Posted by nc...@apache.org.
AMBARI-17456 : Support round-robin scheduling with failover for monitors with distributed collector - Bug fix (avijayan)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/54a6525f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/54a6525f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/54a6525f
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 54a6525f8c49759fe9a013b35ce476693d1e855f
Parents: a418575
Author: Aravindan Vijayan <av...@hortonworks.com>
Authored: Fri Sep 16 13:23:21 2016 -0700
Committer: Aravindan Vijayan <av...@hortonworks.com>
Committed: Fri Sep 16 13:23:21 2016 -0700
----------------------------------------------------------------------
.../src/main/python/core/config_reader.py | 12 ++++++++++--
.../0.1.0/package/templates/metric_monitor.ini.j2 | 1 +
2 files changed, 11 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/54a6525f/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py
index 2e8a170..3ca3a31 100644
--- a/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py
+++ b/ambari-metrics/ambari-metrics-host-monitoring/src/main/python/core/config_reader.py
@@ -22,6 +22,7 @@ import ConfigParser
import StringIO
import json
import os
+import ast
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyImpl
@@ -100,7 +101,7 @@ AMBARI_AGENT_CONF = '/etc/ambari-agent/conf/ambari-agent.ini'
config_content = """
[default]
debug_level = INFO
-metrics_servers = localhost,host1,host2
+metrics_servers = ['localhost','host1','host2']
enable_time_threshold = false
enable_value_threshold = false
@@ -210,8 +211,15 @@ class Configuration:
def get_collector_sleep_interval(self):
return int(self.get("collector", "collector_sleep_interval", 10))
+ def get_hostname_config(self):
+ return self.get("default", "hostname", None)
+
def get_metrics_collector_hosts(self):
- return self.get("default", "metrics_servers", "localhost").split(",")
+ hosts = self.get("default", "metrics_servers", "localhost")
+ if hosts is not "localhost":
+ return ast.literal_eval(hosts)
+ else:
+ return hosts
def get_failover_strategy(self):
return self.get("collector", "failover_strategy", ROUND_ROBIN_FAILOVER_STRATEGY)
http://git-wip-us.apache.org/repos/asf/ambari/blob/54a6525f/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metric_monitor.ini.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metric_monitor.ini.j2 b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metric_monitor.ini.j2
index b011fd5..3823912 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metric_monitor.ini.j2
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/templates/metric_monitor.ini.j2
@@ -18,6 +18,7 @@
[default]
debug_level = INFO
+hostname = {{hostname}}
metrics_servers = {{ams_collector_hosts}}
enable_time_threshold = false
enable_value_threshold = false
[06/11] ambari git commit: AMBARI-18374. rolling restart datanode
cluster name show null (Wang Yaoxin via magyari_sandor)
Posted by nc...@apache.org.
AMBARI-18374. rolling restart datanode cluster name show null (Wang Yaoxin via magyari_sandor)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fd02688f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fd02688f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fd02688f
Branch: refs/heads/branch-dev-patch-upgrade
Commit: fd02688f27afa6e4607f3694dee5e5643e8e2b72
Parents: 1fdda49
Author: Wang Yaoxin <18...@139.com>
Authored: Wed Sep 14 15:36:56 2016 +0530
Committer: Sandor Magyari <sm...@hortonworks.com>
Committed: Mon Sep 19 11:54:34 2016 +0200
----------------------------------------------------------------------
.../eventcreator/RequestEventCreator.java | 30 ++++++++++++++++++--
.../creator/RequestEventCreatorTest.java | 27 ++++++++++++++++++
2 files changed, 54 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/fd02688f/ambari-server/src/main/java/org/apache/ambari/server/audit/request/eventcreator/RequestEventCreator.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/audit/request/eventcreator/RequestEventCreator.java b/ambari-server/src/main/java/org/apache/ambari/server/audit/request/eventcreator/RequestEventCreator.java
index 075e328..f0ea4ec 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/audit/request/eventcreator/RequestEventCreator.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/audit/request/eventcreator/RequestEventCreator.java
@@ -27,7 +27,8 @@ import org.apache.ambari.server.audit.event.AuditEvent;
import org.apache.ambari.server.audit.event.request.AddRequestRequestAuditEvent;
import org.apache.ambari.server.controller.internal.RequestOperationLevel;
import org.apache.ambari.server.controller.spi.Resource;
-
+import java.util.Map;
+import org.apache.ambari.server.controller.internal.RequestResourceProvider;
import com.google.common.collect.ImmutableSet;
/**
@@ -87,10 +88,33 @@ public class RequestEventCreator implements RequestAuditEventCreator {
.withUrl(request.getURI())
.withRemoteIp(request.getRemoteAddress())
.withCommand(request.getBody().getRequestInfoProperties().get("command"))
- .withClusterName(request.getBody().getRequestInfoProperties().get(RequestOperationLevel.OPERATION_CLUSTER_ID))
- .build();
+ .withClusterName(getClusterName(request, RequestOperationLevel.OPERATION_CLUSTER_ID))
+ .build();
default:
return null;
}
}
+ /**
+ *Returns clusterName from the request based on the propertyName parameter
+ *@param request
+ *@param propertyName
+ *@return
+ */
+ private String getClusterName(Request request ,String propertyName) {
+ Map<String, String> requestInfoProps = request.getBody().getRequestInfoProperties();
+ return requestInfoProps.containsKey(propertyName)?requestInfoProps.get(propertyName):getProperty(request, RequestResourceProvider.REQUEST_CLUSTER_NAME_PROPERTY_ID);
+ }
+
+ /**
+ *Returns property from the request based on the propertyName parameter
+ *@param request
+ *@param propertyName
+ *@return
+ */
+ private String getProperty(Request request, String propertyName) {
+ if (!request.getBody().getPropertySets().isEmpty()) {
+ return String.valueOf(request.getBody().getPropertySets().iterator().next().get(propertyName));
+ }
+ return null;
+ }
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/fd02688f/ambari-server/src/test/java/org/apache/ambari/server/audit/request/creator/RequestEventCreatorTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/audit/request/creator/RequestEventCreatorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/audit/request/creator/RequestEventCreatorTest.java
index 1074f0e..ddb42a7 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/audit/request/creator/RequestEventCreatorTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/audit/request/creator/RequestEventCreatorTest.java
@@ -19,7 +19,11 @@
package org.apache.ambari.server.audit.request.creator;
import junit.framework.Assert;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import org.apache.ambari.server.api.services.NamedPropertySet;
import org.apache.ambari.server.api.services.Request;
import org.apache.ambari.server.api.services.Result;
import org.apache.ambari.server.api.services.ResultStatus;
@@ -27,6 +31,7 @@ import org.apache.ambari.server.audit.event.AuditEvent;
import org.apache.ambari.server.audit.event.request.AddRequestRequestAuditEvent;
import org.apache.ambari.server.audit.request.eventcreator.RequestEventCreator;
import org.apache.ambari.server.controller.internal.RequestOperationLevel;
+import org.apache.ambari.server.controller.internal.RequestResourceProvider;
import org.apache.ambari.server.controller.spi.Resource;
import org.junit.Test;
@@ -50,4 +55,26 @@ public class RequestEventCreatorTest extends AuditEventCreatorTestBase{
Assert.assertEquals(expected, actual);
Assert.assertTrue(actual.contains(userName));
}
+ @Test
+ public void postScheduleTest() {
+ RequestEventCreator creator = new RequestEventCreator();
+
+ Request request = AuditEventCreatorTestHelper.createRequest(Request.Type.POST, Resource.Type.Request, null, null);
+ Result result = AuditEventCreatorTestHelper.createResult(new ResultStatus(ResultStatus.STATUS.OK));
+ request.getBody().addRequestInfoProperty("command", "MyCommand");
+
+ Map<String, Object> mapProperties = new HashMap<String, Object>();
+ mapProperties.put(RequestResourceProvider.REQUEST_CLUSTER_NAME_PROPERTY_ID, "mycluster");
+ NamedPropertySet namedPropSet = new NamedPropertySet("", mapProperties);
+ request.getBody().addPropertySet(namedPropSet);
+
+ AuditEvent event = AuditEventCreatorTestHelper.getEvent(creator, request, result);
+ String actual = event.getAuditMessage();
+ String expected = "User(" + userName + "), RemoteIp(1.2.3.4), Operation(Request from server), RequestType(POST), url(http://example.com:8080/api/v1/test), ResultStatus(200 OK), Command(MyCommand), Cluster name(mycluster)";
+
+ Assert.assertTrue("Class mismatch", event instanceof AddRequestRequestAuditEvent);
+ Assert.assertEquals(expected, actual);
+ Assert.assertTrue(actual.contains(userName));
+ }
+
}
[11/11] ambari git commit: Merge branch 'trunk' into
branch-dev-patch-upgrade
Posted by nc...@apache.org.
Merge branch 'trunk' into branch-dev-patch-upgrade
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/79e67865
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/79e67865
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/79e67865
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 79e678653eff30ca07cc5c76ee6901d763a7566d
Parents: e2085dd d1ac8bd
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon Sep 19 11:31:36 2016 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon Sep 19 11:31:36 2016 -0400
----------------------------------------------------------------------
.../src/main/python/ambari_agent/Controller.py | 18 ++-
.../src/main/python/core/config_reader.py | 12 +-
.../eventcreator/RequestEventCreator.java | 30 ++++-
.../ambari/server/stack/ComponentModule.java | 3 +
.../ambari/server/state/ComponentInfo.java | 2 +-
.../package/templates/metric_monitor.ini.j2 | 1 +
.../hadoop-metrics2.properties.xml | 125 +++++++++++++++++++
.../2.0.6/hooks/before-START/scripts/params.py | 3 +-
.../scripts/shared_initialization.py | 18 ++-
.../creator/RequestEventCreatorTest.java | 27 ++++
.../server/stack/ComponentModuleTest.java | 15 ++-
.../python/stacks/2.0.6/configs/default.json | 3 +
.../python/stacks/2.0.6/configs/secured.json | 3 +
.../hooks/before-START/test_before_start.py | 8 +-
ambari-web/app/models/host_component.js | 4 +-
.../8.0.0/package/scripts/service_check.py | 1 +
.../MICROSOFT_R/8.0.0/role_command_order.json | 6 +
contrib/version-builder/example.py | 4 +-
contrib/version-builder/example.sh | 4 +-
contrib/version-builder/version_builder.py | 12 +-
.../ambari/view/hive2/ConnectionSystem.java | 23 ++--
21 files changed, 281 insertions(+), 41 deletions(-)
----------------------------------------------------------------------
[07/11] ambari git commit: AMBARI-18237. Certain configuration files
cannot be modified through Ambari api. (aonishuk)
Posted by nc...@apache.org.
AMBARI-18237. Certain configuration files cannot be modified through Ambari api. (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8ce129be
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8ce129be
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8ce129be
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 8ce129becf35f7e64e3f2009028765a243f4a3fc
Parents: fd02688
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Sep 19 13:27:37 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Sep 19 13:27:37 2016 +0300
----------------------------------------------------------------------
.../hadoop-metrics2.properties.xml | 125 +++++++++++++++++++
.../2.0.6/hooks/before-START/scripts/params.py | 3 +-
.../scripts/shared_initialization.py | 2 +-
3 files changed, 128 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/8ce129be/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-metrics2.properties.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-metrics2.properties.xml b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-metrics2.properties.xml
new file mode 100644
index 0000000..2b9964b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hadoop-metrics2.properties.xml
@@ -0,0 +1,125 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+ <!-- hadoop-metrics2.properties -->
+ <property>
+ <name>content</name>
+ <display-name>hadoop-metrics2.properties template</display-name>
+ <description>This is the jinja template for hadoop-metrics2.properties file</description>
+ <value>
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# syntax: [prefix].[source|sink|jmx].[instance].[options]
+# See package.html for org.apache.hadoop.metrics2 for details
+
+{% if has_ganglia_server %}
+*.period=60
+
+*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
+*.sink.ganglia.period=10
+
+# default for supportsparse is false
+*.sink.ganglia.supportsparse=true
+
+*.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
+*.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40
+
+# Hook up to the server
+namenode.sink.ganglia.servers={{ganglia_server_host}}:8661
+datanode.sink.ganglia.servers={{ganglia_server_host}}:8659
+jobtracker.sink.ganglia.servers={{ganglia_server_host}}:8662
+tasktracker.sink.ganglia.servers={{ganglia_server_host}}:8658
+maptask.sink.ganglia.servers={{ganglia_server_host}}:8660
+reducetask.sink.ganglia.servers={{ganglia_server_host}}:8660
+resourcemanager.sink.ganglia.servers={{ganglia_server_host}}:8664
+nodemanager.sink.ganglia.servers={{ganglia_server_host}}:8657
+historyserver.sink.ganglia.servers={{ganglia_server_host}}:8666
+journalnode.sink.ganglia.servers={{ganglia_server_host}}:8654
+nimbus.sink.ganglia.servers={{ganglia_server_host}}:8649
+supervisor.sink.ganglia.servers={{ganglia_server_host}}:8650
+
+resourcemanager.sink.ganglia.tagsForPrefix.yarn=Queue
+
+{% endif %}
+
+{% if has_metric_collector %}
+
+*.period={{metrics_collection_period}}
+*.sink.timeline.plugin.urls=file:///usr/lib/ambari-metrics-hadoop-sink/ambari-metrics-hadoop-sink.jar
+*.sink.timeline.class=org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink
+*.sink.timeline.period={{metrics_collection_period}}
+*.sink.timeline.sendInterval={{metrics_report_interval}}000
+*.sink.timeline.slave.host.name={{hostname}}
+*.sink.timeline.zookeeper.quorum={{zookeeper_quorum}}
+*.sink.timeline.protocol={{metric_collector_protocol}}
+*.sink.timeline.port={{metric_collector_port}}
+
+# HTTPS properties
+*.sink.timeline.truststore.path = {{metric_truststore_path}}
+*.sink.timeline.truststore.type = {{metric_truststore_type}}
+*.sink.timeline.truststore.password = {{metric_truststore_password}}
+
+datanode.sink.timeline.collector={{metric_collector_hosts}}
+namenode.sink.timeline.collector={{metric_collector_hosts}}
+resourcemanager.sink.timeline.collector={{metric_collector_hosts}}
+nodemanager.sink.timeline.collector={{metric_collector_hosts}}
+jobhistoryserver.sink.timeline.collector={{metric_collector_hosts}}
+journalnode.sink.timeline.collector={{metric_collector_hosts}}
+maptask.sink.timeline.collector={{metric_collector_hosts}}
+reducetask.sink.timeline.collector={{metric_collector_hosts}}
+applicationhistoryserver.sink.timeline.collector={{metric_collector_hosts}}
+
+resourcemanager.sink.timeline.tagsForPrefix.yarn=Queue
+
+{% if is_nn_client_port_configured %}
+# Namenode rpc ports customization
+namenode.sink.timeline.metric.rpc.client.port={{nn_rpc_client_port}}
+{% endif %}
+{% if is_nn_dn_port_configured %}
+namenode.sink.timeline.metric.rpc.datanode.port={{nn_rpc_dn_port}}
+{% endif %}
+{% if is_nn_healthcheck_port_configured %}
+namenode.sink.timeline.metric.rpc.healthcheck.port={{nn_rpc_healthcheck_port}}
+{% endif %}
+
+{% endif %}
+ </value>
+ <value-attributes>
+ <type>content</type>
+ </value-attributes>
+ <on-ambari-upgrade add="true"/>
+ </property>
+</configuration>
http://git-wip-us.apache.org/repos/asf/ambari/blob/8ce129be/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 49a14d0..45eab2f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -40,11 +40,12 @@ stack_version_formatted = format_stack_version(stack_version_unformatted)
dfs_type = default("/commandParams/dfs_type", "")
hadoop_conf_dir = "/etc/hadoop/conf"
-
component_list = default("/localComponents", [])
hdfs_tmp_dir = config['configurations']['hadoop-env']['hdfs_tmp_dir']
+hadoop_metrics2_properties_content = config['configurations']['hadoop-metrics2.properties']['content']
+
# hadoop default params
mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
http://git-wip-us.apache.org/repos/asf/ambari/blob/8ce129be/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
index ba9c8fb..ff52b31 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
@@ -101,7 +101,7 @@ def setup_hadoop():
File(os.path.join(params.hadoop_conf_dir, "hadoop-metrics2.properties"),
owner=params.hdfs_user,
group=params.user_group,
- content=Template("hadoop-metrics2.properties.j2")
+ content=InlineTemplate(params.hadoop_metrics2_properties_content)
)
if params.dfs_type == 'HCFS' and params.has_core_site and 'ECS_CLIENT' in params.component_list:
[10/11] ambari git commit: AMBARI-18415. Stack definition: service
component 'cardinality' inheritance not working properly (aonishuk)
Posted by nc...@apache.org.
AMBARI-18415. Stack definition: service component 'cardinality' inheritance not working properly (aonishuk)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d1ac8bd8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d1ac8bd8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d1ac8bd8
Branch: refs/heads/branch-dev-patch-upgrade
Commit: d1ac8bd89403047a145b185b78c2073f09264472
Parents: 64fc477
Author: Andrew Onishuk <ao...@hortonworks.com>
Authored: Mon Sep 19 16:54:45 2016 +0300
Committer: Andrew Onishuk <ao...@hortonworks.com>
Committed: Mon Sep 19 16:54:45 2016 +0300
----------------------------------------------------------------------
.../apache/ambari/server/stack/ComponentModule.java | 3 +++
.../apache/ambari/server/state/ComponentInfo.java | 2 +-
.../ambari/server/stack/ComponentModuleTest.java | 15 ++++++++++-----
3 files changed, 14 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/d1ac8bd8/ambari-server/src/main/java/org/apache/ambari/server/stack/ComponentModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/ComponentModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/ComponentModule.java
index 537ae32..a90ea1a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/ComponentModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/ComponentModule.java
@@ -122,6 +122,9 @@ public class ComponentModule extends BaseModule<ComponentModule, ComponentInfo>
componentInfo.getCustomCommands());
mergeLogs(parentInfo.getLogs(), componentInfo.getLogs());
+ } else {
+ //set cardinality with default value "0+" if it was not provided and parent is absent.
+ componentInfo.setCardinality("0+");
}
}
http://git-wip-us.apache.org/repos/asf/ambari/blob/d1ac8bd8/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
index 2dae526..1e494b4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ComponentInfo.java
@@ -36,7 +36,7 @@ public class ComponentInfo {
private String displayName;
private String category;
private boolean deleted;
- private String cardinality = "0+";
+ private String cardinality;
@XmlElement(name="versionAdvertised")
private Boolean versionAdvertisedField;
http://git-wip-us.apache.org/repos/asf/ambari/blob/d1ac8bd8/ambari-server/src/test/java/org/apache/ambari/server/stack/ComponentModuleTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/stack/ComponentModuleTest.java b/ambari-server/src/test/java/org/apache/ambari/server/stack/ComponentModuleTest.java
index 905707c..519e5f3 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/stack/ComponentModuleTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/stack/ComponentModuleTest.java
@@ -155,11 +155,14 @@ public class ComponentModuleTest {
String cardinality = "foo";
ComponentInfo info = new ComponentInfo();
- ComponentInfo parentInfo = new ComponentInfo();
+ // parent is null, child cardinality is null
+ assertEquals("0+", resolveComponent(info, null).getModuleInfo().getCardinality());
+ ComponentInfo parentInfo = new ComponentInfo();
+ info = new ComponentInfo();
// parent has value set, child value is null
parentInfo.setCardinality(cardinality);
- assertEquals("0+", resolveComponent(info, parentInfo).getModuleInfo().getCardinality());
+ assertEquals("foo", resolveComponent(info, parentInfo).getModuleInfo().getCardinality());
// child has value set, parent value is null
info.setCardinality(cardinality);
@@ -552,10 +555,12 @@ public class ComponentModuleTest {
private ComponentModule resolveComponent(ComponentInfo info, ComponentInfo parentInfo) {
info.setName("FOO");
- parentInfo.setName("FOO");
-
ComponentModule component = new ComponentModule(info);
- ComponentModule parentComponent = new ComponentModule(parentInfo);
+ ComponentModule parentComponent = null;
+ if (parentInfo != null) {
+ parentInfo.setName("FOO");
+ parentComponent = new ComponentModule(parentInfo);
+ }
component.resolve(parentComponent, Collections.<String, StackModule>emptyMap(), Collections.<String, ServiceModule>emptyMap(), Collections.<String, ExtensionModule>emptyMap());
[04/11] ambari git commit: AMBARI-18413: RecoveryManager may be
logging too often
Posted by nc...@apache.org.
AMBARI-18413: RecoveryManager may be logging too often
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/76ee1b11
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/76ee1b11
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/76ee1b11
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 76ee1b114e233288ebb849820e707a6afefcd3cc
Parents: 54a6525
Author: Nahappan Somasundaram <ns...@hortonworks.com>
Authored: Fri Sep 16 12:00:56 2016 -0700
Committer: Nahappan Somasundaram <ns...@hortonworks.com>
Committed: Fri Sep 16 14:36:24 2016 -0700
----------------------------------------------------------------------
.../src/main/python/ambari_agent/Controller.py | 18 ++++++++++++------
1 file changed, 12 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/76ee1b11/ambari-agent/src/main/python/ambari_agent/Controller.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/Controller.py b/ambari-agent/src/main/python/ambari_agent/Controller.py
index a05011a..7100656 100644
--- a/ambari-agent/src/main/python/ambari_agent/Controller.py
+++ b/ambari-agent/src/main/python/ambari_agent/Controller.py
@@ -268,6 +268,10 @@ class Controller(threading.Thread):
# we log the message at the same interval as 'state interval'
heartbeat_running_msg_timestamp = 0.0
+ # Prevent excessive logging by logging only at specific intervals
+ getrecoverycommands_timestamp = 0.0
+ getrecoverycommands_interval = self.netutil.HEARTBEAT_IDLE_INTERVAL_DEFAULT_MAX_SEC
+
while not self.DEBUG_STOP_HEARTBEATING:
heartbeat_interval = self.netutil.HEARTBEAT_IDLE_INTERVAL_DEFAULT_MAX_SEC
@@ -362,12 +366,14 @@ class Controller(threading.Thread):
# try storing execution command details and desired state
self.addToStatusQueue(response['statusCommands'])
- if not self.actionQueue.tasks_in_progress_or_pending():
- recovery_commands = self.recovery_manager.get_recovery_commands()
- for recovery_command in recovery_commands:
- logger.info("Adding recovery command %s for component %s",
- recovery_command['roleCommand'], recovery_command['role'])
- self.addToQueue([recovery_command])
+ if crt_time - getrecoverycommands_timestamp > int(getrecoverycommands_interval):
+ getrecoverycommands_timestamp = crt_time
+ if not self.actionQueue.tasks_in_progress_or_pending():
+ recovery_commands = self.recovery_manager.get_recovery_commands()
+ for recovery_command in recovery_commands:
+ logger.info("Adding recovery command %s for component %s",
+ recovery_command['roleCommand'], recovery_command['role'])
+ self.addToQueue([recovery_command])
if 'alertDefinitionCommands' in response_keys:
self.alert_scheduler_handler.update_definitions(response)
[02/11] ambari git commit: AMBARI-18397. Update version-builder to
include unique element (dlysnichenko)
Posted by nc...@apache.org.
AMBARI-18397. Update version-builder to include unique element (dlysnichenko)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a4185754
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a4185754
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a4185754
Branch: refs/heads/branch-dev-patch-upgrade
Commit: a4185754b6720a313c7f9c5df8b6311d9e60fc84
Parents: 9dbf079
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Fri Sep 16 19:10:45 2016 +0300
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Fri Sep 16 19:10:45 2016 +0300
----------------------------------------------------------------------
contrib/version-builder/example.py | 4 ++--
contrib/version-builder/example.sh | 4 ++--
contrib/version-builder/version_builder.py | 12 +++++++++---
3 files changed, 13 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/a4185754/contrib/version-builder/example.py
----------------------------------------------------------------------
diff --git a/contrib/version-builder/example.py b/contrib/version-builder/example.py
index b9cdf56..8c7be5f 100644
--- a/contrib/version-builder/example.py
+++ b/contrib/version-builder/example.py
@@ -28,8 +28,8 @@ def main(args):
vb.add_manifest("HDFS-271", "HDFS", "2.7.1.2.4.0")
- vb.add_repo("redhat6", "HDP-2.4", "HDP", "http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.4.2.0")
- vb.add_repo("redhat6", "HDP-UTILS-1.1.0.20", "HDP-UTILS", "http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6")
+ vb.add_repo("redhat6", "HDP-2.4", "HDP", "http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.4.2.0", "true")
+ vb.add_repo("redhat6", "HDP-UTILS-1.1.0.20", "HDP-UTILS", "http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6", "false")
vb.persist()
vb.finalize("../../ambari-server/src/main/resources/version_definition.xsd")
http://git-wip-us.apache.org/repos/asf/ambari/blob/a4185754/contrib/version-builder/example.sh
----------------------------------------------------------------------
diff --git a/contrib/version-builder/example.sh b/contrib/version-builder/example.sh
index 5bf8002..11689cd 100755
--- a/contrib/version-builder/example.sh
+++ b/contrib/version-builder/example.sh
@@ -40,8 +40,8 @@ python version_builder.py --file $filename --available --manifest-id HDFS-271
python version_builder.py --file $filename --os --os-family redhat6 --os-package-version 2_4_1_1_12345
#call any number of times for repo per os
-python version_builder.py --file $filename --repo --repo-os redhat6 --repo-id HDP-2.4 --repo-name HDP --repo-url http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.4.1.1
-python version_builder.py --file $filename --repo --repo-os redhat6 --repo-id HDP-UTILS-1.1.0.20 --repo-name HDP-UTILS --repo-url http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6
+python version_builder.py --file $filename --repo --repo-os redhat6 --repo-id HDP-2.4 --repo-name HDP --repo-url http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.4.1.1 --repo-unique true
+python version_builder.py --file $filename --repo --repo-os redhat6 --repo-id HDP-UTILS-1.1.0.20 --repo-name HDP-UTILS --repo-url http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6 --repo-unique false
python version_builder.py --file $filename --finalize --xsd ../../ambari-server/src/main/resources/version_definition.xsd
http://git-wip-us.apache.org/repos/asf/ambari/blob/a4185754/contrib/version-builder/version_builder.py
----------------------------------------------------------------------
diff --git a/contrib/version-builder/version_builder.py b/contrib/version-builder/version_builder.py
index 2c07b0c..6d1689a 100644
--- a/contrib/version-builder/version_builder.py
+++ b/contrib/version-builder/version_builder.py
@@ -173,7 +173,7 @@ class VersionBuilder:
e = ET.SubElement(service_element, 'component')
e.text = component
- def add_repo(self, os_family, repo_id, repo_name, base_url):
+ def add_repo(self, os_family, repo_id, repo_name, base_url, unique):
"""
Adds a repository
"""
@@ -204,6 +204,10 @@ class VersionBuilder:
e = ET.SubElement(repo_element, 'reponame')
e.text = repo_name
+ if unique is not None:
+ e = ET.SubElement(repo_element, 'unique')
+ e.text = unique
+
def _check_xmllint(self):
"""
@@ -318,7 +322,7 @@ def process_repo(vb, options):
if not options.repo:
return
- vb.add_repo(options.repo_os, options.repo_id, options.repo_name, options.repo_url)
+ vb.add_repo(options.repo_os, options.repo_id, options.repo_name, options.repo_url, options.unique)
def validate_manifest(parser, options):
"""
@@ -426,11 +430,13 @@ def main(argv):
help="The package version to use for the OS")
parser.add_option('--repo', action='store_true', dest='repo',
- help="Add repository data with options: --repo-os, --repo-url, --repo-id, --repo-name")
+ help="Add repository data with options: --repo-os, --repo-url, --repo-id, --repo-name, --repo-unique")
parser.add_option('--repo-os', dest='repo_os',
help="The operating system type: i.e. redhat6, redhat7, debian7, ubuntu12, ubuntu14, ubuntu16, suse11, suse12")
parser.add_option('--repo-url', dest='repo_url',
help="The base url for the repository data")
+ parser.add_option('--repo-unique', dest='unique', type='choice', choices=['true', 'false'],
+ help="Indicates base url should be unique")
parser.add_option('--repo-id', dest='repo_id', help="The ID of the repo")
parser.add_option('--repo-name', dest='repo_name', help="The name of the repo")
[05/11] ambari git commit: AMBARI-18387. Unable to delete Hive view.
(dipayanb)
Posted by nc...@apache.org.
AMBARI-18387. Unable to delete Hive view. (dipayanb)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1fdda491
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1fdda491
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1fdda491
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1fdda4913adb2318ddfc1406d39fe8d80837a37e
Parents: 76ee1b1
Author: Dipayan Bhowmick <di...@gmail.com>
Authored: Wed Sep 14 15:36:56 2016 +0530
Committer: Dipayan Bhowmick <di...@gmail.com>
Committed: Mon Sep 19 11:23:12 2016 +0530
----------------------------------------------------------------------
.../ambari/view/hive2/ConnectionSystem.java | 23 ++++++++++++--------
1 file changed, 14 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/1fdda491/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/ConnectionSystem.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/ConnectionSystem.java b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/ConnectionSystem.java
index f534130..88ea3d7 100644
--- a/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/ConnectionSystem.java
+++ b/contrib/views/hive-next/src/main/java/org/apache/ambari/view/hive2/ConnectionSystem.java
@@ -24,6 +24,8 @@ import akka.actor.Inbox;
import akka.actor.PoisonPill;
import akka.actor.Props;
import com.google.common.collect.Multimap;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
import org.apache.ambari.view.ViewContext;
import org.apache.ambari.view.hive2.actor.DeathWatch;
import org.apache.ambari.view.hive2.actor.OperationController;
@@ -45,8 +47,9 @@ public class ConnectionSystem {
private static Map<String, Map<String, ActorRef>> operationControllerMap = new ConcurrentHashMap<>();
private ConnectionSystem() {
- this.actorSystem = ActorSystem.create(ACTOR_SYSTEM_NAME);
- ;
+ ClassLoader classLoader = getClass().getClassLoader();
+ Config config = ConfigFactory.load(classLoader);
+ this.actorSystem = ActorSystem.create(ACTOR_SYSTEM_NAME, config, classLoader);
}
public static ConnectionSystem getInstance() {
@@ -63,8 +66,8 @@ public class ConnectionSystem {
private ActorRef createOperationController(ViewContext context) {
ActorRef deathWatch = actorSystem.actorOf(Props.create(DeathWatch.class));
return actorSystem.actorOf(
- Props.create(OperationController.class, actorSystem, deathWatch, context,
- new ConnectionSupplier(), new DataStorageSupplier(), new HdfsApiSupplier()));
+ Props.create(OperationController.class, actorSystem, deathWatch, context,
+ new ConnectionSupplier(), new DataStorageSupplier(), new HdfsApiSupplier()));
}
public ActorSystem getActorSystem() {
@@ -82,12 +85,12 @@ public class ConnectionSystem {
String instanceName = context.getInstanceName();
ActorRef ref = null;
Map<String, ActorRef> stringActorRefMap = operationControllerMap.get(instanceName);
- if(stringActorRefMap != null) {
+ if (stringActorRefMap != null) {
ref = stringActorRefMap.get(context.getUsername());
}
if (ref == null) {
ref = createOperationController(context);
- if(stringActorRefMap == null) {
+ if (stringActorRefMap == null) {
stringActorRefMap = new HashMap<>();
stringActorRefMap.put(context.getUsername(), ref);
operationControllerMap.put(instanceName, stringActorRefMap);
@@ -100,9 +103,11 @@ public class ConnectionSystem {
public void removeOperationControllerFromCache(String viewInstanceName) {
Map<String, ActorRef> refs = operationControllerMap.remove(viewInstanceName);
- for (ActorRef ref : refs.values()) {
- Inbox inbox = Inbox.create(getActorSystem());
- inbox.send(ref, PoisonPill.getInstance());
+ if (refs != null) {
+ for (ActorRef ref : refs.values()) {
+ Inbox inbox = Inbox.create(getActorSystem());
+ inbox.send(ref, PoisonPill.getInstance());
+ }
}
}
[08/11] ambari git commit: AMBARI-18349. Specify role command order
to make Microsoft-R service check run after YARN is started. (Attila
Doroszlai via stoader)
Posted by nc...@apache.org.
AMBARI-18349. Specify role command order to make Microsoft-R service check run after YARN is started. (Attila Doroszlai via stoader)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/335ef6e9
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/335ef6e9
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/335ef6e9
Branch: refs/heads/branch-dev-patch-upgrade
Commit: 335ef6e9a2c140f79e3ccc11dec703ef9e0afe50
Parents: 8ce129b
Author: Attila Doroszlai <ad...@hortonworks.com>
Authored: Mon Sep 19 15:02:13 2016 +0200
Committer: Toader, Sebastian <st...@hortonworks.com>
Committed: Mon Sep 19 15:02:13 2016 +0200
----------------------------------------------------------------------
.../before-START/scripts/shared_initialization.py | 16 ++++++++++++++++
.../8.0.0/package/scripts/service_check.py | 1 +
.../MICROSOFT_R/8.0.0/role_command_order.json | 6 ++++++
3 files changed, 23 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/335ef6e9/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
index ff52b31..8f845d2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/shared_initialization.py
@@ -107,6 +107,8 @@ def setup_hadoop():
if params.dfs_type == 'HCFS' and params.has_core_site and 'ECS_CLIENT' in params.component_list:
create_dirs()
+ create_microsoft_r_dir()
+
def setup_configs():
"""
@@ -173,3 +175,17 @@ def create_dirs():
action="execute"
)
+def create_microsoft_r_dir():
+ import params
+ if 'MICROSOFT_R_CLIENT' in params.component_list and params.default_fs:
+ directory = '/user/RevoShare'
+ try:
+ params.HdfsResource(directory,
+ type="directory",
+ action="create_on_execute",
+ owner=params.hdfs_user,
+ mode=0777)
+ params.HdfsResource(None, action="execute")
+ except Exception as exception:
+ Logger.warning("Could not check the existence of {0} on DFS while starting {1}, exception: {2}".format(directory, params.current_service, str(exception)))
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/335ef6e9/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/package/scripts/service_check.py b/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/package/scripts/service_check.py
index 547476b..5368e94 100644
--- a/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/package/scripts/service_check.py
+++ b/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/package/scripts/service_check.py
@@ -45,6 +45,7 @@ class MicrosoftRServiceCheckLinux(MicrosoftRServiceCheck):
action="create_on_execute",
owner=params.hdfs_user,
mode=0777)
+ params.HdfsResource(None, action="execute")
except Exception as exception:
Logger.warning("Could not check the existence of /user/RevoShare on HDFS, exception: {0}".format(str(exception)))
http://git-wip-us.apache.org/repos/asf/ambari/blob/335ef6e9/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/role_command_order.json
----------------------------------------------------------------------
diff --git a/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/role_command_order.json b/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/role_command_order.json
new file mode 100755
index 0000000..6edbc2c
--- /dev/null
+++ b/contrib/management-packs/microsoft-r_mpack/src/main/resources/common-services/MICROSOFT_R/8.0.0/role_command_order.json
@@ -0,0 +1,6 @@
+{
+ "general_deps" : {
+ "_comment" : "dependencies for Microsoft R",
+ "MICROSOFT_R_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START"]
+ }
+}