You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by jl...@apache.org on 2015/01/07 03:35:59 UTC
[6/8] ambari git commit: AMBARI-8878: Common Services: Refactor
HDP-2.0.6 YARN service (Jayush Luniya)
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/alerts/alert_nodemanager_health.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/alerts/alert_nodemanager_health.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/alerts/alert_nodemanager_health.py
new file mode 100644
index 0000000..b1de951
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/alerts/alert_nodemanager_health.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import json
+import socket
+import urllib2
+
# Alert result codes understood by Ambari.
RESULT_CODE_OK = 'OK'
RESULT_CODE_CRITICAL = 'CRITICAL'
RESULT_CODE_UNKNOWN = 'UNKNOWN'

# Configuration lookup keys; Ambari resolves these before calling execute().
NODEMANAGER_HTTP_ADDRESS_KEY = '{{yarn-site/yarn.nodemanager.webapp.address}}'
NODEMANAGER_HTTPS_ADDRESS_KEY = '{{yarn-site/yarn.nodemanager.webapp.https.address}}'
YARN_HTTP_POLICY_KEY = '{{yarn-site/yarn.http.policy}}'

# Pre-formatted labels returned alongside the alert result code.
OK_MESSAGE = 'NodeManager Healthy'
CRITICAL_CONNECTION_MESSAGE = 'Connection failed to {0}'
CRITICAL_NODEMANAGER_STATUS_MESSAGE = 'NodeManager returned an unexpected status of "{0}"'
CRITICAL_NODEMANAGER_UNKNOWN_JSON_MESSAGE = 'Unable to determine NodeManager health from unexpected JSON response'

# Fallback web UI port used when yarn-site does not define a webapp address.
NODEMANAGER_DEFAULT_PORT = 8042


def get_tokens():
  """
  Returns a tuple of tokens in the format {{site/property}} that will be used
  to build the dictionary passed into execute
  """
  return (
    NODEMANAGER_HTTP_ADDRESS_KEY,
    NODEMANAGER_HTTPS_ADDRESS_KEY,
    YARN_HTTP_POLICY_KEY,
  )
+
+
def execute(parameters=None, host_name=None):
  """
  Returns a tuple containing the result code and a pre-formatted result label

  Keyword arguments:
  parameters (dictionary): a mapping of parameter key to value
  host_name (string): the name of this host where the alert is running
  """
  if parameters is None:
    return (RESULT_CODE_UNKNOWN, ['There were no parameters supplied to the script.'])

  http_uri = parameters.get(NODEMANAGER_HTTP_ADDRESS_KEY)
  https_uri = parameters.get(NODEMANAGER_HTTPS_ADDRESS_KEY)
  http_policy = parameters.get(YARN_HTTP_POLICY_KEY, 'HTTP_ONLY')

  # determine the right URI and whether to use SSL
  scheme = 'http'
  uri = http_uri
  if http_policy == 'HTTPS_ONLY':
    scheme = 'https'
    if https_uri is not None:
      uri = https_uri

  # some yarn-site structures don't have the web ui address; fall back to the
  # default NodeManager web port on this host
  if uri is None:
    if host_name is None:
      host_name = socket.getfqdn()
    uri = '{0}:{1}'.format(host_name, NODEMANAGER_DEFAULT_PORT)

  query = "{0}://{1}/ws/v1/node/info".format(scheme, uri)

  try:
    # execute the query for the JSON that includes the NodeManager health report
    url_response = urllib2.urlopen(query)
  except Exception:
    return (RESULT_CODE_CRITICAL, [CRITICAL_CONNECTION_MESSAGE.format(uri)])

  # URL response received, parse it
  try:
    json_response = json.loads(url_response.read())
    # nodeHealthy is a JSON boolean; normalize to a string for comparison
    node_healthy = str(json_response['nodeInfo']['nodeHealthy'])
  except Exception:
    # BUG FIX: the original returned the raw query URL as the alert label here;
    # report the dedicated unknown-JSON message instead
    return (RESULT_CODE_CRITICAL, [CRITICAL_NODEMANAGER_UNKNOWN_JSON_MESSAGE])

  # proper JSON received, compare against known value
  if node_healthy.lower() == 'true':
    return (RESULT_CODE_OK, [OK_MESSAGE])

  return (RESULT_CODE_CRITICAL, [CRITICAL_NODEMANAGER_STATUS_MESSAGE.format(node_healthy)])
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/files/validateYarnComponentStatus.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/files/validateYarnComponentStatus.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/files/validateYarnComponentStatus.py
new file mode 100644
index 0000000..33ed8b1
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/files/validateYarnComponentStatus.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import optparse
+import subprocess
+import json
+
# Short component identifiers accepted as the positional CLI argument.
RESOURCEMANAGER = 'rm'
NODEMANAGER = 'nm'
HISTORYSERVER = 'hs'

# Component states reported by the YARN REST API that count as healthy.
STARTED_STATE = 'STARTED'
RUNNING_STATE = 'RUNNING'
+
+#Return reponse for given path and address
+def getResponse(path, address, ssl_enabled):
+
+ command = "curl"
+ httpGssnegotiate = "--negotiate"
+ userpswd = "-u:"
+ insecure = "-k"# This is smoke test, no need to check CA of server
+ if ssl_enabled:
+ url = 'https://' + address + path
+ else:
+ url = 'http://' + address + path
+
+ command_with_flags = [command,httpGssnegotiate,userpswd,insecure,url]
+
+ proc = subprocess.Popen(command_with_flags, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ response = json.loads(stdout)
+ if response == None:
+ print 'There is no response for url: ' + str(url)
+ raise Exception('There is no response for url: ' + str(url))
+ return response
+
+#Verify that REST api is available for given component
+def validateAvailability(component, path, addresses, ssl_enabled):
+ responses = {}
+ for address in addresses.split(','):
+ try:
+ responses[address] = getResponse(path, address, ssl_enabled)
+ except Exception as e:
+ print 'Error checking availability status of component.', e
+
+ if not responses:
+ exit(1)
+
+ is_valid = validateAvailabilityResponse(component, responses.values()[0])
+ if not is_valid:
+ exit(1)
+
+#Validate component-specific response
+def validateAvailabilityResponse(component, response):
+ try:
+ if component == RESOURCEMANAGER:
+ rm_state = response['clusterInfo']['state']
+ if rm_state == STARTED_STATE:
+ return True
+ else:
+ print 'Resourcemanager is not started'
+ return False
+
+ elif component == NODEMANAGER:
+ node_healthy = bool(response['nodeInfo']['nodeHealthy'])
+ if node_healthy:
+ return True
+ else:
+ return False
+ elif component == HISTORYSERVER:
+ hs_start_time = response['historyInfo']['startedOn']
+ if hs_start_time > 0:
+ return True
+ else:
+ return False
+ else:
+ return False
+ except Exception as e:
+ print 'Error validation of availability response for ' + str(component), e
+ return False
+
+#Verify that component has required resources to work
+def validateAbility(component, path, addresses, ssl_enabled):
+ responses = {}
+ for address in addresses.split(','):
+ try:
+ responses[address] = getResponse(path, address, ssl_enabled)
+ except Exception as e:
+ print 'Error checking ability of component.', e
+
+ if not responses:
+ exit(1)
+
+ is_valid = validateAbilityResponse(component, responses.values()[0])
+ if not is_valid:
+ exit(1)
+
+#Validate component-specific response that it has required resources to work
+def validateAbilityResponse(component, response):
+ try:
+ if component == RESOURCEMANAGER:
+ nodes = []
+ if response.has_key('nodes') and not response['nodes'] == None and response['nodes'].has_key('node'):
+ nodes = response['nodes']['node']
+ connected_nodes_count = len(nodes)
+ if connected_nodes_count == 0:
+ print 'There is no connected nodemanagers to resourcemanager'
+ return False
+ active_nodes = filter(lambda x: x['state'] == RUNNING_STATE, nodes)
+ active_nodes_count = len(active_nodes)
+
+ if connected_nodes_count == 0:
+ print 'There is no connected active nodemanagers to resourcemanager'
+ return False
+ else:
+ return True
+ else:
+ return False
+ except Exception as e:
+ print 'Error validation of ability response', e
+ return False
+
#
# Main.
#
def main():
  '''Parse CLI options, then validate availability (and RM ability) of the component.'''
  parser = optparse.OptionParser(usage="usage: %prog [options] component ")
  parser.add_option("-p", "--port", dest="address", help="Host:Port for REST API of a desired component")
  parser.add_option("-s", "--ssl", dest="ssl_enabled", help="Is SSL enabled for UI of component")

  (options, args) = parser.parse_args()

  # BUG FIX: args[0] raised a bare IndexError when the component was omitted
  if not args:
    parser.error("Component argument is required")
  component = args[0]

  address = options.address
  # BUG FIX: the original used "in 'true'", a substring test that accepted
  # values like '' or 'rue' and raised TypeError when the option was omitted
  ssl_enabled = options.ssl_enabled == 'true'

  if component == RESOURCEMANAGER:
    path = '/ws/v1/cluster/info'
  elif component == NODEMANAGER:
    path = '/ws/v1/node/info'
  elif component == HISTORYSERVER:
    path = '/ws/v1/history/info'
  else:
    parser.error("Invalid component")

  validateAvailability(component, path, address, ssl_enabled)

  # the ResourceManager additionally needs live NodeManagers to be useful
  if component == RESOURCEMANAGER:
    path = '/ws/v1/cluster/nodes'
    validateAbility(component, path, address, ssl_enabled)

if __name__ == "__main__":
  main()
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/__init__.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/__init__.py
new file mode 100644
index 0000000..35de4bb
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/__init__.py
@@ -0,0 +1,20 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
new file mode 100644
index 0000000..e9c0fa1
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/application_timeline_server.py
@@ -0,0 +1,132 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.security_commons import build_expectations, \
+ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties,\
+ FILE_TYPE_XML
+from resource_management.libraries.functions.format import format
+
+from yarn import yarn
+from service import service
+
class ApplicationTimelineServer(Script):
  """Ambari lifecycle script for the YARN Application Timeline Server."""

  def install(self, env):
    self.install_packages(env)

  def configure(self, env):
    # render yarn configs for the timeline server role
    import params
    env.set_params(params)
    yarn(name='apptimelineserver')

  def pre_rolling_restart(self, env):
    """Point hdp-select at the target stack version before a rolling restart."""
    Logger.info("Executing Rolling Upgrade pre-restart")
    import params
    env.set_params(params)

    # hdp-select only exists on HDP 2.2+
    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      Execute(format("hdp-select set hadoop-yarn-timelineserver {version}"))

  def start(self, env, rolling_restart=False):
    import params
    env.set_params(params)
    self.configure(env) # FOR SECURITY
    service('timelineserver', action='start')

  def stop(self, env, rolling_restart=False):
    import params
    env.set_params(params)
    service('timelineserver', action='stop')

  def status(self, env):
    import status_params
    env.set_params(status_params)
    # migrate a pid file left behind by older releases before checking status
    Execute(format("mv {yarn_historyserver_pid_file_old} {yarn_historyserver_pid_file}"),
            only_if = format("test -e {yarn_historyserver_pid_file_old}", user=status_params.yarn_user))
    # BUG FIX: was 'functions.check_process_status', but no 'functions' name is
    # imported in this module; call it unqualified like the sibling YARN scripts.
    check_process_status(status_params.yarn_historyserver_pid_file)

  def security_status(self, env):
    """Emit SECURED_KERBEROS/UNSECURED/ERROR based on yarn-site kerberos settings."""
    import status_params
    env.set_params(status_params)

    props_value_check = {"yarn.timeline-service.enabled": "true",
                         "yarn.timeline-service.http-authentication.type": "kerberos",
                         "yarn.acl.enable": "true"}
    props_empty_check = ["yarn.timeline-service.principal",
                         "yarn.timeline-service.keytab",
                         "yarn.timeline-service.http-authentication.kerberos.principal",
                         "yarn.timeline-service.http-authentication.kerberos.keytab"]
    props_read_check = ["yarn.timeline-service.keytab",
                        "yarn.timeline-service.http-authentication.kerberos.keytab"]
    yarn_site_props = build_expectations('yarn-site', props_value_check, props_empty_check,
                                         props_read_check)

    yarn_expectations = {}
    yarn_expectations.update(yarn_site_props)

    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
                                                 {'yarn-site.xml': FILE_TYPE_XML})
    result_issues = validate_security_config_properties(security_params, yarn_expectations)
    if not result_issues: # If all validations passed successfully
      try:
        # Double check the dict before calling execute; the original's paren
        # grouping was misleading -- this is one flat OR chain.
        if ('yarn-site' not in security_params
            or 'yarn.timeline-service.keytab' not in security_params['yarn-site']
            or 'yarn.timeline-service.principal' not in security_params['yarn-site']
            or 'yarn.timeline-service.http-authentication.kerberos.keytab' not in security_params['yarn-site']
            or 'yarn.timeline-service.http-authentication.kerberos.principal' not in security_params['yarn-site']):
          self.put_structured_out({"securityState": "UNSECURED"})
          self.put_structured_out(
            {"securityIssuesFound": "Keytab file or principal are not set property."})
          return

        # verify both the service principal and the SPNEGO principal can kinit
        cached_kinit_executor(status_params.kinit_path_local,
                              status_params.yarn_user,
                              security_params['yarn-site']['yarn.timeline-service.keytab'],
                              security_params['yarn-site']['yarn.timeline-service.principal'],
                              status_params.hostname,
                              status_params.tmp_dir,
                              30)
        cached_kinit_executor(status_params.kinit_path_local,
                              status_params.yarn_user,
                              security_params['yarn-site']['yarn.timeline-service.http-authentication.kerberos.keytab'],
                              security_params['yarn-site']['yarn.timeline-service.http-authentication.kerberos.principal'],
                              status_params.hostname,
                              status_params.tmp_dir,
                              30)
        self.put_structured_out({"securityState": "SECURED_KERBEROS"})
      except Exception as e:
        self.put_structured_out({"securityState": "ERROR"})
        self.put_structured_out({"securityStateErrorInfo": str(e)})
    else:
      issues = []
      for cf in result_issues:
        issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
      self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
      self.put_structured_out({"securityState": "UNSECURED"})


if __name__ == "__main__":
  ApplicationTimelineServer().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
new file mode 100644
index 0000000..66c2143
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
@@ -0,0 +1,123 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.security_commons import build_expectations, \
+ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
+ FILE_TYPE_XML
+
+from yarn import yarn
+from service import service
+
class HistoryServer(Script):
  """Ambari lifecycle script for the MapReduce JobHistory Server."""

  def install(self, env):
    self.install_packages(env)

  def configure(self, env):
    # render yarn/mapred configs for the historyserver role
    import params
    env.set_params(params)
    yarn(name="historyserver")

  def pre_rolling_restart(self, env):
    """Select the target stack version and refresh the mapreduce tarball in HDFS."""
    Logger.info("Executing Rolling Upgrade pre-restart")
    import params
    env.set_params(params)

    # hdp-select and the tarball copy only apply to HDP 2.2+
    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      Execute(format("hdp-select set hadoop-mapreduce-historyserver {version}"))
      copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)

  def start(self, env, rolling_restart=False):
    import params
    env.set_params(params)
    self.configure(env) # FOR SECURITY
    # ensure the mapreduce tarball exists in HDFS before starting the daemon
    copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)
    service('historyserver', action='start', serviceName='mapreduce')

  def stop(self, env, rolling_restart=False):
    import params
    env.set_params(params)
    service('historyserver', action='stop', serviceName='mapreduce')

  def status(self, env):
    import status_params
    env.set_params(status_params)
    check_process_status(status_params.mapred_historyserver_pid_file)

  def security_status(self, env):
    """Emit SECURED_KERBEROS/UNSECURED/ERROR based on mapred-site kerberos settings."""
    import status_params
    env.set_params(status_params)

    # only require the keytab/principal properties to be present and non-empty
    expectations = {}
    expectations.update(build_expectations('mapred-site',
                                           None,
                                           [
                                             'mapreduce.jobhistory.keytab',
                                             'mapreduce.jobhistory.principal',
                                             'mapreduce.jobhistory.webapp.spnego-keytab-file',
                                             'mapreduce.jobhistory.webapp.spnego-principal'
                                           ],
                                           None))

    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
                                                 {'mapred-site.xml': FILE_TYPE_XML})
    result_issues = validate_security_config_properties(security_params, expectations)
    if not result_issues: # If all validations passed successfully
      try:
        # Double check the dict before calling execute
        if ( 'mapred-site' not in security_params or
             'mapreduce.jobhistory.keytab' not in security_params['mapred-site'] or
             'mapreduce.jobhistory.principal' not in security_params['mapred-site'] or
             'mapreduce.jobhistory.webapp.spnego-keytab-file' not in security_params['mapred-site'] or
             'mapreduce.jobhistory.webapp.spnego-principal' not in security_params['mapred-site']):
          self.put_structured_out({"securityState": "UNSECURED"})
          self.put_structured_out(
            {"securityIssuesFound": "Keytab file or principal not set."})
          return

        # verify both the service principal and the SPNEGO principal can kinit
        # NOTE(review): unlike the other YARN scripts, no expiration argument is
        # passed here -- presumably the helper has a default; confirm.
        cached_kinit_executor(status_params.kinit_path_local,
                              status_params.mapred_user,
                              security_params['mapred-site']['mapreduce.jobhistory.keytab'],
                              security_params['mapred-site']['mapreduce.jobhistory.principal'],
                              status_params.hostname,
                              status_params.tmp_dir)
        cached_kinit_executor(status_params.kinit_path_local,
                              status_params.mapred_user,
                              security_params['mapred-site']['mapreduce.jobhistory.webapp.spnego-keytab-file'],
                              security_params['mapred-site']['mapreduce.jobhistory.webapp.spnego-principal'],
                              status_params.hostname,
                              status_params.tmp_dir)
        self.put_structured_out({"securityState": "SECURED_KERBEROS"})
      except Exception as e:
        self.put_structured_out({"securityState": "ERROR"})
        self.put_structured_out({"securityStateErrorInfo": str(e)})
    else:
      issues = []
      for cf in result_issues:
        issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
      self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
      self.put_structured_out({"securityState": "UNSECURED"})

if __name__ == "__main__":
  HistoryServer().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
new file mode 100644
index 0000000..a9f4367
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
@@ -0,0 +1,77 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+
class MapReduce2ServiceCheck(Script):
  """Smoke test: run the MapReduce wordcount example end to end as the smoke user."""

  def service_check(self, env):
    import params
    env.set_params(params)

    # NOTE: format() interpolates these local variable names into the commands
    # below, so they must keep exactly these names.
    jar_path = format("{hadoop_mapred2_jar_location}/{hadoopMapredExamplesJarName}")
    input_file = format("/user/{smokeuser}/mapredsmokeinput")
    output_file = format("/user/{smokeuser}/mapredsmokeoutput")

    cleanup_cmd = format("fs -rm -r -f {output_file} {input_file}")
    create_file_cmd = format("fs -put /etc/passwd {input_file}")
    test_cmd = format("fs -test -e {output_file}")
    run_wordcount_job = format("jar {jar_path} wordcount {input_file} {output_file}")

    # obtain a Kerberos ticket before touching HDFS on secure clusters
    if params.security_enabled:
      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")

      Execute(kinit_cmd,
              user=params.smokeuser
      )

    # remove leftovers from any previous smoke-test run
    ExecuteHadoop(cleanup_cmd,
                  tries=1,
                  try_sleep=5,
                  user=params.smokeuser,
                  bin_dir=params.execute_path,
                  conf_dir=params.hadoop_conf_dir
    )

    # stage a small, always-present input file
    ExecuteHadoop(create_file_cmd,
                  tries=1,
                  try_sleep=5,
                  user=params.smokeuser,
                  bin_dir=params.execute_path,
                  conf_dir=params.hadoop_conf_dir
    )

    # run the wordcount example job
    ExecuteHadoop(run_wordcount_job,
                  tries=1,
                  try_sleep=5,
                  user=params.smokeuser,
                  bin_dir=params.execute_path,
                  conf_dir=params.hadoop_conf_dir,
                  logoutput=True
    )

    # verify the job actually produced output
    ExecuteHadoop(test_cmd,
                  user=params.smokeuser,
                  bin_dir=params.execute_path,
                  conf_dir=params.hadoop_conf_dir
    )

if __name__ == "__main__":
  MapReduce2ServiceCheck().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
new file mode 100644
index 0000000..c4e2800
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapreduce2_client.py
@@ -0,0 +1,49 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+import sys
+from resource_management import *
+
+from yarn import yarn
+
class MapReduce2Client(Script):
  """Ambari lifecycle script for the MapReduce2 client component."""

  def pre_rolling_restart(self, env):
    """Point hdp-select at the target stack version before a rolling restart."""
    # CONSISTENCY FIX: the sibling YARN scripts import these helpers explicitly;
    # relying on the wildcard resource_management namespace risks a NameError.
    from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
    import params
    env.set_params(params)

    # hdp-select only exists on HDP 2.2+
    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      Execute(format("hdp-select set hadoop-client {version}"))

  def install(self, env):
    self.install_packages(env)
    self.configure(env)

  def configure(self, env):
    # render the client configuration files
    import params
    env.set_params(params)
    yarn()

  def status(self, env):
    # a client component has no daemon whose status could be reported
    raise ClientComponentHasNoStatus()

if __name__ == "__main__":
  MapReduce2Client().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
new file mode 100644
index 0000000..87be214
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager.py
@@ -0,0 +1,137 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+import nodemanager_upgrade
+
+from resource_management import *
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.security_commons import build_expectations, \
+ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
+ FILE_TYPE_XML
+
+from yarn import yarn
+from service import service
+
class Nodemanager(Script):
  """Ambari lifecycle script for the YARN NodeManager."""

  def install(self, env):
    self.install_packages(env)

  def configure(self, env):
    # render yarn configs for the nodemanager role
    import params
    env.set_params(params)
    yarn(name="nodemanager")

  def pre_rolling_restart(self, env):
    """Point hdp-select at the target stack version before a rolling restart."""
    Logger.info("Executing NodeManager Rolling Upgrade pre-restart")
    import params
    env.set_params(params)

    # hdp-select only exists on HDP 2.2+
    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      Execute(format("hdp-select set hadoop-yarn-nodemanager {version}"))

  def start(self, env, rolling_restart=False):
    import params
    env.set_params(params)
    self.configure(env) # FOR SECURITY
    service('nodemanager',action='start')

  def post_rolling_restart(self, env):
    """Verify the restarted NodeManager has rejoined the cluster."""
    Logger.info("Executing NodeManager Rolling Upgrade post-restart")
    import params
    env.set_params(params)

    nodemanager_upgrade.post_upgrade_check()

  def stop(self, env, rolling_restart=False):
    import params
    env.set_params(params)

    service('nodemanager',action='stop')

  def status(self, env):
    import status_params
    env.set_params(status_params)
    check_process_status(status_params.nodemanager_pid_file)

  def security_status(self, env):
    """Emit SECURED_KERBEROS/UNSECURED/ERROR based on yarn-site kerberos settings."""
    import status_params
    env.set_params(status_params)

    # NOTE(review): these value checks reference yarn.timeline-service.*
    # properties, which looks copy-pasted from the timeline-server script --
    # confirm they are intended for the NodeManager security check.
    props_value_check = {"yarn.timeline-service.enabled": "true",
                         "yarn.timeline-service.http-authentication.type": "kerberos",
                         "yarn.acl.enable": "true"}
    props_empty_check = ["yarn.nodemanager.principal",
                         "yarn.nodemanager.keytab",
                         "yarn.nodemanager.webapp.spnego-principal",
                         "yarn.nodemanager.webapp.spnego-keytab-file"]

    props_read_check = ["yarn.nodemanager.keytab",
                        "yarn.nodemanager.webapp.spnego-keytab-file"]
    yarn_site_props = build_expectations('yarn-site', props_value_check, props_empty_check,
                                         props_read_check)

    yarn_expectations ={}
    yarn_expectations.update(yarn_site_props)

    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
                                                 {'yarn-site.xml': FILE_TYPE_XML})
    # NOTE(review): yarn_expectations is built above but yarn_site_props is
    # passed here -- equivalent today (the dicts have the same content), but
    # inconsistent with the sibling timeline-server script.
    result_issues = validate_security_config_properties(security_params, yarn_site_props)
    if not result_issues: # If all validations passed successfully
      try:
        # Double check the dict before calling execute
        # (the parentheses are misleading: this is one flat OR chain)
        if ( 'yarn-site' not in security_params
             or 'yarn.nodemanager.keytab' not in security_params['yarn-site']
             or 'yarn.nodemanager.principal' not in security_params['yarn-site']) \
          or 'yarn.nodemanager.webapp.spnego-keytab-file' not in security_params['yarn-site'] \
          or 'yarn.nodemanager.webapp.spnego-principal' not in security_params['yarn-site']:
          self.put_structured_out({"securityState": "UNSECURED"})
          self.put_structured_out(
            {"securityIssuesFound": "Keytab file or principal are not set property."})
          return

        # verify both the service principal and the SPNEGO principal can kinit
        cached_kinit_executor(status_params.kinit_path_local,
                              status_params.yarn_user,
                              security_params['yarn-site']['yarn.nodemanager.keytab'],
                              security_params['yarn-site']['yarn.nodemanager.principal'],
                              status_params.hostname,
                              status_params.tmp_dir,
                              30)
        cached_kinit_executor(status_params.kinit_path_local,
                              status_params.yarn_user,
                              security_params['yarn-site']['yarn.nodemanager.webapp.spnego-keytab-file'],
                              security_params['yarn-site']['yarn.nodemanager.webapp.spnego-principal'],
                              status_params.hostname,
                              status_params.tmp_dir,
                              30)
        self.put_structured_out({"securityState": "SECURED_KERBEROS"})
      except Exception as e:
        self.put_structured_out({"securityState": "ERROR"})
        self.put_structured_out({"securityStateErrorInfo": str(e)})
    else:
      issues = []
      for cf in result_issues:
        issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
      self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
      self.put_structured_out({"securityState": "UNSECURED"})

if __name__ == "__main__":
  Nodemanager().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager_upgrade.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager_upgrade.py
new file mode 100644
index 0000000..54e0fae
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/nodemanager_upgrade.py
@@ -0,0 +1,74 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import subprocess
+
+from resource_management.core.logger import Logger
+from resource_management.core.exceptions import Fail
+from resource_management.core.resources.system import Execute
+from resource_management.core import shell
+from resource_management.libraries.functions.decorator import retry
+
+
def post_upgrade_check():
  '''
  Verify that this NodeManager has rejoined the cluster after an upgrade.

  When Kerberos security is enabled and a NodeManager kinit command is
  configured, a ticket is obtained first so that the subsequent
  "yarn node -list" invocation can authenticate.
  :return:
  '''
  import params

  Logger.info('NodeManager executing "yarn node -list -states=RUNNING" to verify the node has rejoined the cluster...')

  if params.security_enabled:
    if params.nodemanager_kinit_cmd:
      Execute(params.nodemanager_kinit_cmd, user=params.yarn_user)

  _check_nodemanager_startup()
+
+
@retry(times=12, sleep_time=10, err_class=Fail)
def _check_nodemanager_startup():
  '''
  Checks that a NodeManager is in a RUNNING state in the cluster via
  "yarn node -list -states=RUNNING" command. Once the NodeManager is found to be
  alive this method will return, otherwise it will raise a Fail(...) and retry
  automatically.
  :return:
  :raises Fail: when the NodeManager is not (yet) listed as RUNNING; the
    @retry decorator re-invokes this function until it succeeds or the
    retry budget (12 tries, 10s apart) is exhausted.
  '''
  import params

  command = 'yarn node -list -states=RUNNING'

  try:
    # 'su - yarn -c "yarn node -status c6401.ambari.apache.org:45454"'
    # NOTE(review): this runs as hdfs_user while the kinit performed by
    # post_upgrade_check is for yarn_user — confirm the intended user.
    return_code, yarn_output = shell.call(command, user=params.hdfs_user)
  except Exception:
    # Was a bare "except:", which would also swallow SystemExit and
    # KeyboardInterrupt; only convert genuine errors into a retryable Fail.
    raise Fail('Unable to determine if the NodeManager has started after upgrade.')

  if return_code == 0:
    # Match either the bare hostname or the host:port NodeManager address
    # in the (lower-cased) command output.
    hostname = params.hostname.lower()
    nodemanager_address = params.nm_address.lower()
    yarn_output = yarn_output.lower()

    if hostname in yarn_output or nodemanager_address in yarn_output:
      Logger.info('NodeManager with ID {0} has rejoined the cluster.'.format(nodemanager_address))
      return
    else:
      raise Fail('NodeManager with ID {0} was not found in the list of running NodeManagers'.format(nodemanager_address))

  raise Fail('Unable to determine if the NodeManager has started after upgrade (result code {0})'.format(str(return_code)))
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
new file mode 100644
index 0000000..d9c73bf
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
@@ -0,0 +1,209 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import os
+from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
+from resource_management.libraries.functions.default import default
+from resource_management import *
+import status_params
+
# server configurations
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()

# This is expected to be of the form #.#.#.#
stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)

# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
version = default("/commandParams/version", None)

hostname = config['hostname']

#hadoop params
# HDP 2.2+ uses versioned /usr/hdp/current symlinks managed by hdp-select;
# the role-specific *_role_root picks the right symlink for this command's role.
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
  yarn_role_root = "hadoop-yarn-client"
  mapred_role_root = "hadoop-mapreduce-client"

  command_role = default("/role", "")
  if command_role == "APP_TIMELINE_SERVER":
    yarn_role_root = "hadoop-yarn-timelineserver"
  elif command_role == "HISTORYSERVER":
    mapred_role_root = "hadoop-mapreduce-historyserver"
  elif command_role == "MAPREDUCE2_CLIENT":
    mapred_role_root = "hadoop-mapreduce-client"
  elif command_role == "NODEMANAGER":
    yarn_role_root = "hadoop-yarn-nodemanager"
  elif command_role == "RESOURCEMANAGER":
    yarn_role_root = "hadoop-yarn-resourcemanager"
  elif command_role == "YARN_CLIENT":
    yarn_role_root = "hadoop-yarn-client"

  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"

  hadoop_mapred2_jar_location = format("/usr/hdp/current/{mapred_role_root}")
  mapred_bin = format("/usr/hdp/current/{mapred_role_root}/sbin")

  hadoop_yarn_home = format("/usr/hdp/current/{yarn_role_root}")
  yarn_bin = format("/usr/hdp/current/{yarn_role_root}/sbin")
  yarn_container_bin = format("/usr/hdp/current/{yarn_role_root}/bin")
else:
  # Pre-2.2 stacks use the fixed /usr/lib layout.
  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
  hadoop_bin = "/usr/lib/hadoop/sbin"
  hadoop_bin_dir = "/usr/bin"
  hadoop_yarn_home = '/usr/lib/hadoop-yarn'
  hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
  mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
  yarn_bin = "/usr/lib/hadoop-yarn/sbin"
  yarn_container_bin = "/usr/lib/hadoop-yarn/bin"

hadoop_conf_dir = "/etc/hadoop/conf"
limits_conf_dir = "/etc/security/limits.d"
# PATH used when Execute-ing yarn/hadoop commands from other scripts.
execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + yarn_container_bin

ulimit_cmd = "ulimit -c unlimited;"

# Service users (mapred/yarn come from status_params so status checks agree).
mapred_user = status_params.mapred_user
yarn_user = status_params.yarn_user
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']

smokeuser = config['configurations']['cluster-env']['smokeuser']
security_enabled = config['configurations']['cluster-env']['security_enabled']
smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
yarn_executor_container_group = config['configurations']['yarn-site']['yarn.nodemanager.linux-container-executor.group']
kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
rm_hosts = config['clusterHostInfo']['rm_host']
rm_host = rm_hosts[0]
rm_port = config['configurations']['yarn-site']['yarn.resourcemanager.webapp.address'].split(':')[-1]
rm_https_port = "8090"
# TODO UPGRADE default, update site during upgrade
rm_nodes_exclude_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.exclude-path","/etc/hadoop/conf/yarn.exclude")

java64_home = config['hostLevelParams']['java_home']
hadoop_ssl_enabled = default("/configurations/core-site/hadoop.ssl.enabled", False)

# Heap sizes and log/pid directory layout for the YARN/MR daemons.
yarn_heapsize = config['configurations']['yarn-env']['yarn_heapsize']
resourcemanager_heapsize = config['configurations']['yarn-env']['resourcemanager_heapsize']
nodemanager_heapsize = config['configurations']['yarn-env']['nodemanager_heapsize']
apptimelineserver_heapsize = default("/configurations/yarn-env/apptimelineserver_heapsize", 1024)
ats_leveldb_dir = config['configurations']['yarn-site']['yarn.timeline-service.leveldb-timeline-store.path']
yarn_log_dir_prefix = config['configurations']['yarn-env']['yarn_log_dir_prefix']
yarn_pid_dir_prefix = status_params.yarn_pid_dir_prefix
mapred_pid_dir_prefix = status_params.mapred_pid_dir_prefix
mapred_log_dir_prefix = config['configurations']['mapred-env']['mapred_log_dir_prefix']
mapred_env_sh_template = config['configurations']['mapred-env']['content']
yarn_env_sh_template = config['configurations']['yarn-env']['content']

# With RM HA (two rm_hosts) the webui address lists both hosts.
if len(rm_hosts) > 1:
  additional_rm_host = rm_hosts[1]
  rm_webui_address = format("{rm_host}:{rm_port},{additional_rm_host}:{rm_port}")
  rm_webui_https_address = format("{rm_host}:{rm_https_port},{additional_rm_host}:{rm_https_port}")
else:
  rm_webui_address = format("{rm_host}:{rm_port}")
  rm_webui_https_address = format("{rm_host}:{rm_https_port}")

nm_webui_address = config['configurations']['yarn-site']['yarn.nodemanager.webapp.address']
hs_webui_address = config['configurations']['mapred-site']['mapreduce.jobhistory.webapp.address']
nm_address = config['configurations']['yarn-site']['yarn.nodemanager.address'] # still contains 0.0.0.0
if hostname and nm_address and nm_address.startswith("0.0.0.0:"):
  # Substitute the real hostname so the address identifies this node.
  nm_address = nm_address.replace("0.0.0.0", hostname)

nm_local_dirs = config['configurations']['yarn-site']['yarn.nodemanager.local-dirs']
nm_log_dirs = config['configurations']['yarn-site']['yarn.nodemanager.log-dirs']

distrAppJarName = "hadoop-yarn-applications-distributedshell-2.*.jar"
hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"

yarn_pid_dir = status_params.yarn_pid_dir
mapred_pid_dir = status_params.mapred_pid_dir

mapred_log_dir = format("{mapred_log_dir_prefix}/{mapred_user}")
yarn_log_dir = format("{yarn_log_dir_prefix}/{yarn_user}")
mapred_job_summary_log = format("{mapred_log_dir_prefix}/{mapred_user}/hadoop-mapreduce.jobsummary.log")
yarn_job_summary_log = format("{yarn_log_dir_prefix}/{yarn_user}/hadoop-mapreduce.jobsummary.log")

user_group = config['configurations']['cluster-env']['user_group']

#exclude file
exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
exclude_file_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.exclude-path","/etc/hadoop/conf/yarn.exclude")

ats_host = set(default("/clusterHostInfo/app_timeline_server_hosts", []))
has_ats = not len(ats_host) == 0

# default kinit commands
# Empty string means "no kinit needed" and is safe to prepend to any command.
rm_kinit_cmd = ""
yarn_timelineservice_kinit_cmd = ""
nodemanager_kinit_cmd = ""

if security_enabled:
  _rm_principal_name = config['configurations']['yarn-site']['yarn.resourcemanager.principal']
  _rm_principal_name = _rm_principal_name.replace('_HOST',hostname.lower())
  _rm_keytab = config['configurations']['yarn-site']['yarn.resourcemanager.keytab']
  rm_kinit_cmd = format("{kinit_path_local} -kt {_rm_keytab} {_rm_principal_name};")

  # YARN timeline security options are only available in HDP Champlain
  if has_ats:
    _yarn_timelineservice_principal_name = config['configurations']['yarn-site']['yarn.timeline-service.principal']
    _yarn_timelineservice_principal_name = _yarn_timelineservice_principal_name.replace('_HOST', hostname.lower())
    _yarn_timelineservice_keytab = config['configurations']['yarn-site']['yarn.timeline-service.keytab']
    yarn_timelineservice_kinit_cmd = format("{kinit_path_local} -kt {_yarn_timelineservice_keytab} {_yarn_timelineservice_principal_name};")

  if 'yarn.nodemanager.principal' in config['configurations']['yarn-site']:
    _nodemanager_principal_name = config['configurations']['yarn-site']['yarn.nodemanager.principal']
    _nodemanager_keytab = config['configurations']['yarn-site']['yarn.nodemanager.keytab']
    nodemanager_kinit_cmd = format("{kinit_path_local} -kt {_nodemanager_keytab} {_nodemanager_principal_name};")


yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']
yarn_nm_app_log_dir = config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']
mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']
mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']
jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize", "900")

#for create_hdfs_directory
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
import functools
#create partial functions with common arguments for every HdfsDirectory call
#to create hdfs directory we need to call params.HdfsDirectory in code
HdfsDirectory = functools.partial(
  HdfsDirectory,
  conf_dir=hadoop_conf_dir,
  hdfs_user=hdfs_user,
  security_enabled = security_enabled,
  keytab = hdfs_user_keytab,
  kinit_path_local = kinit_path_local,
  bin_dir = hadoop_bin_dir
)
update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)

mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)

#taskcontroller.cfg

mapred_local_dir = "/tmp/hadoop-mapred/mapred/local"
hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
min_user_id = config['configurations']['yarn-env']['min_user_id']

# Node labels
node_labels_dir = default("/configurations/yarn-site/yarn.node-labels.fs-store.root-dir", None)
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
new file mode 100644
index 0000000..3d6a00e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
@@ -0,0 +1,173 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.security_commons import build_expectations, \
+ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
+ FILE_TYPE_XML
+
+from yarn import yarn
+from service import service
+
+
class Resourcemanager(Script):
  """
  Ambari lifecycle script for the YARN ResourceManager component.

  Implements install/configure/start/stop/status plus the RM-specific
  custom commands (refreshqueues, decommission) and the Kerberos
  security-state check used by the "Security" status view.
  """

  def install(self, env):
    self.install_packages(env)

  def configure(self, env):
    import params

    env.set_params(params)
    yarn(name='resourcemanager')

  def pre_rolling_restart(self, env):
    """Point hdp-select at the new stack version before a rolling restart."""
    Logger.info("Executing Rolling Upgrade post-restart")
    import params
    env.set_params(params)

    # hdp-select only exists on HDP 2.2.0.0 and later stacks.
    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
      Execute(format("hdp-select set hadoop-yarn-resourcemanager {version}"))

  def start(self, env, rolling_restart=False):
    import params

    env.set_params(params)
    self.configure(env) # FOR SECURITY
    service('resourcemanager',
            action='start'
    )

  def stop(self, env, rolling_restart=False):
    import params

    env.set_params(params)

    service('resourcemanager',
            action='stop'
    )

  def status(self, env):
    import status_params

    env.set_params(status_params)
    check_process_status(status_params.resourcemanager_pid_file)

  def security_status(self, env):
    """
    Report the Kerberos security state of the ResourceManager via
    structured output: securityState (SECURED_KERBEROS / UNSECURED / ERROR),
    plus securityIssuesFound or securityStateErrorInfo where applicable.
    """
    import status_params
    env.set_params(status_params)

    # Expected yarn-site values when security is enabled.
    props_value_check = {"yarn.timeline-service.enabled": "true",
                         "yarn.timeline-service.http-authentication.type": "kerberos",
                         "yarn.acl.enable": "true"}
    # Properties that must be present and non-empty.
    props_empty_check = ["yarn.resourcemanager.principal",
                         "yarn.resourcemanager.keytab",
                         "yarn.resourcemanager.webapp.spnego-principal",
                         "yarn.resourcemanager.webapp.spnego-keytab-file"]
    # Keytab files that must exist and be readable.
    props_read_check = ["yarn.resourcemanager.keytab",
                        "yarn.resourcemanager.webapp.spnego-keytab-file"]
    yarn_site_props = build_expectations('yarn-site', props_value_check, props_empty_check,
                                         props_read_check)

    yarn_expectations = {}
    yarn_expectations.update(yarn_site_props)

    # Validate against the yarn-site.xml actually on disk, not the desired config.
    security_params = get_params_from_filesystem(status_params.hadoop_conf_dir,
                                                 {'yarn-site.xml': FILE_TYPE_XML})
    result_issues = validate_security_config_properties(security_params, yarn_site_props)
    if not result_issues:  # If all validations passed successfully
      try:
        # Double check the dict before calling execute
        if 'yarn-site' not in security_params \
            or 'yarn.resourcemanager.keytab' not in security_params['yarn-site'] \
            or 'yarn.resourcemanager.principal' not in security_params['yarn-site'] \
            or 'yarn.resourcemanager.webapp.spnego-keytab-file' not in security_params['yarn-site'] \
            or 'yarn.resourcemanager.webapp.spnego-principal' not in security_params['yarn-site']:
          self.put_structured_out({"securityState": "UNSECURED"})
          # Fixed message typo: "property" -> "properly".
          self.put_structured_out(
            {"securityIssuesFound": "Keytab file or principal are not set properly."})
          return

        # Verify a ticket can actually be obtained with each keytab/principal.
        cached_kinit_executor(status_params.kinit_path_local,
                              status_params.yarn_user,
                              security_params['yarn-site']['yarn.resourcemanager.keytab'],
                              security_params['yarn-site']['yarn.resourcemanager.principal'],
                              status_params.hostname,
                              status_params.tmp_dir,
                              30)
        cached_kinit_executor(status_params.kinit_path_local,
                              status_params.yarn_user,
                              security_params['yarn-site']['yarn.resourcemanager.webapp.spnego-keytab-file'],
                              security_params['yarn-site']['yarn.resourcemanager.webapp.spnego-principal'],
                              status_params.hostname,
                              status_params.tmp_dir,
                              30)
        self.put_structured_out({"securityState": "SECURED_KERBEROS"})
      except Exception as e:
        self.put_structured_out({"securityState": "ERROR"})
        self.put_structured_out({"securityStateErrorInfo": str(e)})
    else:
      issues = []
      for cf in result_issues:
        issues.append("Configuration file %s did not pass the validation. Reason: %s" % (cf, result_issues[cf]))
      self.put_structured_out({"securityIssuesFound": ". ".join(issues)})
      self.put_structured_out({"securityState": "UNSECURED"})

  def refreshqueues(self, env):
    import params

    self.configure(env)
    env.set_params(params)

    service('resourcemanager',
            action='refreshQueues'
    )

  def decommission(self, env):
    """Write the NodeManager exclude file and tell the RM to re-read it."""
    import params

    env.set_params(params)
    rm_kinit_cmd = params.rm_kinit_cmd
    yarn_user = params.yarn_user
    conf_dir = params.hadoop_conf_dir
    user_group = params.user_group

    yarn_refresh_cmd = format("{rm_kinit_cmd} yarn --config {conf_dir} rmadmin -refreshNodes")

    File(params.exclude_file_path,
         content=Template("exclude_hosts_list.j2"),
         owner=yarn_user,
         group=user_group
    )

    # Explicit "== False" kept deliberately: the flag originates from command
    # params and may not be a real bool — TODO confirm its type upstream.
    if params.update_exclude_file_only == False:
      Execute(yarn_refresh_cmd,
              environment={'PATH': params.execute_path},
              user=yarn_user)
+
+
# Entry point: the Ambari agent invokes this script directly; execute()
# dispatches to the lifecycle method named in the command JSON.
if __name__ == "__main__":
  Resourcemanager().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
new file mode 100644
index 0000000..1002094
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service.py
@@ -0,0 +1,76 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+
+
def service(componentName, action='start', serviceName='yarn'):
  """
  Start, stop, or refresh a YARN/MapReduce daemon via its *-daemon.sh script.

  :param componentName: daemon name passed to the daemon script and used in
    the pid-file name (e.g. 'resourcemanager', 'nodemanager', 'historyserver')
  :param action: one of 'start', 'stop', 'refreshQueues'
  :param serviceName: 'yarn' or 'mapreduce'; selects daemon script, pid dir
    and run-as user

  NOTE: the resource_management format() calls below interpolate names such
  as {componentName}, {daemon}, {pid_file} and {cmd} from this function's
  local scope — do not rename these locals without updating the strings.
  """
  import params

  # The MapReduce JobHistoryServer has its own daemon script and runs as
  # mapred_user; every other component is a YARN daemon run as yarn_user.
  if serviceName == 'mapreduce' and componentName == 'historyserver':
    daemon = format("{mapred_bin}/mr-jobhistory-daemon.sh")
    pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-{componentName}.pid")
    usr = params.mapred_user
  else:
    daemon = format("{yarn_bin}/yarn-daemon.sh")
    pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-{componentName}.pid")
    usr = params.yarn_user

  cmd = format("export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && {daemon} --config {hadoop_conf_dir}")

  if action == 'start':
    daemon_cmd = format("{ulimit_cmd} {cmd} start {componentName}")
    # Shell test that succeeds only when the pid file exists AND its pid is live.
    check_process = format("ls {pid_file} >/dev/null 2>&1 && ps -p `cat {pid_file}` >/dev/null 2>&1")

    # Remove the pid file if its corresponding process is not running.
    File(pid_file,
         action="delete",
         not_if=check_process)

    # Attempt to start the process. Internally, this is skipped if the process is already running.
    Execute(daemon_cmd,
            user=usr,
            not_if=check_process
    )

    # Ensure that the process with the expected PID exists.
    Execute(check_process,
            user=usr,
            not_if=check_process,
            initial_wait=5
    )

  elif action == 'stop':
    daemon_cmd = format("{cmd} stop {componentName}")
    Execute(daemon_cmd,
            user=usr)

    # The daemon script does not always clean up its pid file; do it here.
    File(pid_file,
         action="delete")

  elif action == 'refreshQueues':
    # rm_kinit_cmd is "" when security is disabled, so this is safe to prepend.
    rm_kinit_cmd = params.rm_kinit_cmd
    refresh_cmd = format("{rm_kinit_cmd} export HADOOP_LIBEXEC_DIR={hadoop_libexec_dir} && {yarn_container_bin}/yarn rmadmin -refreshQueues")

    Execute(refresh_cmd,
            user=usr,
    )
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
new file mode 100644
index 0000000..7189664
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
@@ -0,0 +1,68 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+import sys
+
class ServiceCheck(Script):
  """
  YARN service check: validates the ResourceManager web UI status via the
  bundled validateYarnComponentStatus.py helper, then runs "yarn node -list"
  as the smoke user.

  NOTE: the format() calls interpolate names such as {component_type},
  {component_address} and {validateStatusFilePath} from local scope — do not
  rename these locals without updating the strings.
  """
  def service_check(self, env):
    import params
    env.set_params(params)

    run_yarn_check_cmd = format("yarn --config {hadoop_conf_dir} node -list")

    # Check the RM web UI over HTTPS when SSL is enabled, HTTP otherwise.
    component_type = 'rm'
    if params.hadoop_ssl_enabled:
      component_address = params.rm_webui_https_address
    else:
      component_address = params.rm_webui_address

    # Copy the status-validation helper script into tmp_dir and build its command line.
    validateStatusFileName = "validateYarnComponentStatus.py"
    validateStatusFilePath = format("{tmp_dir}/{validateStatusFileName}")
    python_executable = sys.executable
    validateStatusCmd = format("{python_executable} {validateStatusFilePath} {component_type} -p {component_address} -s {hadoop_ssl_enabled}")

    # Prepend a smoke-user kinit when Kerberos is enabled.
    if params.security_enabled:
      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")
      smoke_cmd = format("{kinit_cmd} {validateStatusCmd}")
    else:
      smoke_cmd = validateStatusCmd

    File(validateStatusFilePath,
         content=StaticFile(validateStatusFileName),
         mode=0755
    )

    Execute(smoke_cmd,
            tries=3,
            try_sleep=5,
            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
            user=params.smokeuser,
            logoutput=True
    )

    Execute(run_yarn_check_cmd,
            path=params.execute_path,
            user=params.smokeuser
    )
+
# Entry point: the Ambari agent invokes this script directly; execute()
# dispatches to the requested command method.
if __name__ == "__main__":
  ServiceCheck().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
new file mode 100644
index 0000000..3cfff05
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
# Minimal parameter set needed by status()/security_status() checks; kept
# separate from params.py so status polling stays cheap.
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()

mapred_user = config['configurations']['mapred-env']['mapred_user']
yarn_user = config['configurations']['yarn-env']['yarn_user']
yarn_pid_dir_prefix = config['configurations']['yarn-env']['yarn_pid_dir_prefix']
mapred_pid_dir_prefix = config['configurations']['mapred-env']['mapred_pid_dir_prefix']
yarn_pid_dir = format("{yarn_pid_dir_prefix}/{yarn_user}")
mapred_pid_dir = format("{mapred_pid_dir_prefix}/{mapred_user}")

# Pid files follow the <dir>/<service>-<user>-<component>.pid convention
# produced by the *-daemon.sh scripts.
resourcemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-resourcemanager.pid")
nodemanager_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-nodemanager.pid")
yarn_historyserver_pid_file_old = format("{yarn_pid_dir}/yarn-{yarn_user}-historyserver.pid")
yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")

# Security related/required params
hadoop_conf_dir = "/etc/hadoop/conf"
hostname = config['hostname']
kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
security_enabled = config['configurations']['cluster-env']['security_enabled']
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
new file mode 100644
index 0000000..cf0d211
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn.py
@@ -0,0 +1,249 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+import sys
+import os
+
+
+def yarn(name = None):
+ """Lay down everything YARN/MapReduce2 needs on this host: HDFS and local
+ directories, the *-site.xml configuration files, env scripts, ulimit
+ files and (in secure mode) the setuid container-executor/task-controller
+ binaries and their cfg files.
+
+ name -- component being configured: "nodemanager", "historyserver",
+ "resourcemanager", "apptimelineserver", or None for a client.
+ """
+ import params
+
+
+ # HDFS directories used by log aggregation and the JobHistoryServer.
+ # "create_delayed" only queues each request; all of them are executed by
+ # the single HdfsDirectory(None, action="create") flush call below, so
+ # that flush must stay after every queued entry.
+ if name in ["nodemanager","historyserver"]:
+ if params.yarn_log_aggregation_enabled:
+ params.HdfsDirectory(params.yarn_nm_app_log_dir,
+ action="create_delayed",
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=0777,
+ recursive_chmod=True
+ )
+ params.HdfsDirectory("/mapred",
+ action="create_delayed",
+ owner=params.mapred_user
+ )
+ params.HdfsDirectory("/mapred/system",
+ action="create_delayed",
+ owner=params.hdfs_user
+ )
+ params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
+ action="create_delayed",
+ owner=params.mapred_user,
+ group=params.user_group,
+ mode=0777
+ )
+
+ # 01777 = sticky bit: any job can write, only owners can delete.
+ params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
+ action="create_delayed",
+ owner=params.mapred_user,
+ group=params.user_group,
+ mode=01777
+ )
+ # Flush: actually create all directories queued above in one pass.
+ params.HdfsDirectory(None, action="create")
+
+ # NodeManager container work/log dirs; these are comma-separated lists
+ # in yarn-site, one entry per mount. Failures are ignored so one missing
+ # or read-only mount does not abort configuration of the rest.
+ if name == "nodemanager":
+ Directory(params.nm_local_dirs.split(',') + params.nm_log_dirs.split(','),
+ owner=params.yarn_user,
+ group=params.user_group,
+ recursive=True,
+ recursive_permission=True,
+ ignore_failures=True,
+ mode=0775
+ )
+
+ # Local pid/log directories for the yarn and mapred service accounts.
+ Directory([params.yarn_pid_dir_prefix, params.yarn_pid_dir, params.yarn_log_dir],
+ owner=params.yarn_user,
+ group=params.user_group,
+ recursive=True
+ )
+
+ Directory([params.mapred_pid_dir_prefix, params.mapred_pid_dir, params.mapred_log_dir_prefix, params.mapred_log_dir],
+ owner=params.mapred_user,
+ group=params.user_group,
+ recursive=True
+ )
+ Directory([params.yarn_log_dir_prefix],
+ owner=params.yarn_user,
+ recursive=True,
+ ignore_failures=True,
+ )
+
+ # Render the Hadoop client/daemon configuration files from the desired
+ # configs Ambari passed down in params.config.
+ XmlConfig("core-site.xml",
+ conf_dir=params.hadoop_conf_dir,
+ configurations=params.config['configurations']['core-site'],
+ configuration_attributes=params.config['configuration_attributes']['core-site'],
+ owner=params.hdfs_user,
+ group=params.user_group,
+ mode=0644
+ )
+
+ XmlConfig("mapred-site.xml",
+ conf_dir=params.hadoop_conf_dir,
+ configurations=params.config['configurations']['mapred-site'],
+ configuration_attributes=params.config['configuration_attributes']['mapred-site'],
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=0644
+ )
+
+ XmlConfig("yarn-site.xml",
+ conf_dir=params.hadoop_conf_dir,
+ configurations=params.config['configurations']['yarn-site'],
+ configuration_attributes=params.config['configuration_attributes']['yarn-site'],
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=0644
+ )
+
+ XmlConfig("capacity-scheduler.xml",
+ conf_dir=params.hadoop_conf_dir,
+ configurations=params.config['configurations']['capacity-scheduler'],
+ configuration_attributes=params.config['configuration_attributes']['capacity-scheduler'],
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=0644
+ )
+
+ # Component-specific extras.
+ if name == 'resourcemanager':
+ File(params.yarn_job_summary_log,
+ owner=params.yarn_user,
+ group=params.user_group
+ )
+ # Backing store for YARN node labels, if the feature is configured.
+ # Note this uses action="create" directly (immediate), not the
+ # delayed/flush pattern above.
+ if params.node_labels_dir:
+ params.HdfsDirectory(params.node_labels_dir,
+ action="create",
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=0700
+ )
+ elif name == 'apptimelineserver':
+ # Local leveldb store for the Application Timeline Server.
+ Directory(params.ats_leveldb_dir,
+ owner=params.yarn_user,
+ group=params.user_group,
+ recursive=True,
+ recursive_permission=True
+ )
+
+ # Decommission exclude file referenced by yarn-site; ensure it exists
+ # with the right ownership even when empty.
+ File(params.rm_nodes_exclude_path,
+ owner=params.yarn_user,
+ group=params.user_group
+ )
+
+ # /etc/security/limits.d entries raising nofile/nproc for the service users.
+ File(format("{limits_conf_dir}/yarn.conf"),
+ mode=0644,
+ content=Template('yarn.conf.j2')
+ )
+
+ File(format("{limits_conf_dir}/mapreduce.conf"),
+ mode=0644,
+ content=Template('mapreduce.conf.j2')
+ )
+
+ File(format("{hadoop_conf_dir}/yarn-env.sh"),
+ owner=params.yarn_user,
+ group=params.user_group,
+ mode=0755,
+ content=InlineTemplate(params.yarn_env_sh_template)
+ )
+
+ if params.security_enabled:
+ # 06050 = setuid+setgid, group-executable only: container-executor must
+ # run as root but be invocable only by the configured executor group.
+ container_executor = format("{yarn_container_bin}/container-executor")
+ File(container_executor,
+ group=params.yarn_executor_container_group,
+ mode=06050
+ )
+
+ File(format("{hadoop_conf_dir}/container-executor.cfg"),
+ group=params.user_group,
+ mode=0644,
+ content=Template('container-executor.cfg.j2')
+ )
+
+
+ # taskcontroller.cfg ownership/permissions differ between secure and
+ # insecure clusters; in secure mode root must own it (read-only 0644).
+ if params.security_enabled:
+ tc_mode = 0644
+ tc_owner = "root"
+ else:
+ tc_mode = None
+ tc_owner = params.hdfs_user
+
+ File(format("{hadoop_conf_dir}/mapred-env.sh"),
+ owner=tc_owner,
+ content=InlineTemplate(params.mapred_env_sh_template)
+ )
+
+ if params.security_enabled:
+ # Legacy MR1 task-controller: same setuid scheme as container-executor.
+ File(os.path.join(params.hadoop_bin, "task-controller"),
+ owner="root",
+ group=params.mapred_tt_group,
+ mode=06050
+ )
+ File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+ owner = tc_owner,
+ mode = tc_mode,
+ group = params.mapred_tt_group,
+ content=Template("taskcontroller.cfg.j2")
+ )
+ else:
+ File(os.path.join(params.hadoop_conf_dir, 'taskcontroller.cfg'),
+ owner=tc_owner,
+ content=Template("taskcontroller.cfg.j2")
+ )
+
+ # NOTE(review): mapred-site.xml and capacity-scheduler.xml are written a
+ # second time here with different owners (mapred_user/hdfs_user) than the
+ # earlier writes above; the later write wins. Presumably intentional for
+ # client-only hosts where the earlier branch context differs -- confirm.
+ if "mapred-site" in params.config['configurations']:
+ XmlConfig("mapred-site.xml",
+ conf_dir=params.hadoop_conf_dir,
+ configurations=params.config['configurations']['mapred-site'],
+ configuration_attributes=params.config['configuration_attributes']['mapred-site'],
+ owner=params.mapred_user,
+ group=params.user_group
+ )
+
+ if "capacity-scheduler" in params.config['configurations']:
+ XmlConfig("capacity-scheduler.xml",
+ conf_dir=params.hadoop_conf_dir,
+ configurations=params.config['configurations'][
+ 'capacity-scheduler'],
+ configuration_attributes=params.config['configuration_attributes']['capacity-scheduler'],
+ owner=params.hdfs_user,
+ group=params.user_group
+ )
+
+ # Fix ownership on optional files only if something else already put
+ # them in place (package install, manual admin action).
+ if os.path.exists(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml')):
+ File(os.path.join(params.hadoop_conf_dir, 'fair-scheduler.xml'),
+ owner=params.mapred_user,
+ group=params.user_group
+ )
+
+ if os.path.exists(
+ os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example')):
+ File(os.path.join(params.hadoop_conf_dir, 'ssl-client.xml.example'),
+ owner=params.mapred_user,
+ group=params.user_group
+ )
+
+ if os.path.exists(
+ os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example')):
+ File(os.path.join(params.hadoop_conf_dir, 'ssl-server.xml.example'),
+ owner=params.mapred_user,
+ group=params.user_group
+ )
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
new file mode 100644
index 0000000..60b9bce
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/yarn_client.py
@@ -0,0 +1,49 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+import sys
+from resource_management import *
+
+from yarn import yarn
+
+class YarnClient(Script):
+ """Ambari lifecycle handler for the YARN client (gateway) component.
+
+ A client has no daemon: install/configure write configuration files only,
+ and status always reports "no status".
+ """
+
+ def pre_rolling_restart(self, env):
+ # During a rolling upgrade, repoint the hadoop-client symlinks at the
+ # target stack version. hdp-select only exists on HDP >= 2.2.0.0, hence
+ # the version guard.
+ import params
+ env.set_params(params)
+
+ if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+ Execute(format("hdp-select set hadoop-client {version}"))
+
+ def install(self, env):
+ # Install the packages, then immediately lay down client configuration.
+ self.install_packages(env)
+ self.configure(env)
+
+ def configure(self, env):
+ import params
+ env.set_params(params)
+ # Shared setup routine; no component name means client-only config.
+ yarn()
+
+ def status(self, env):
+ # Clients have no running process for Ambari to monitor.
+ raise ClientComponentHasNoStatus()
+
+if __name__ == "__main__":
+ YarnClient().execute()
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/container-executor.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/container-executor.cfg.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/container-executor.cfg.j2
new file mode 100644
index 0000000..c6f1ff6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/container-executor.cfg.j2
@@ -0,0 +1,40 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+#/*
+# * Licensed to the Apache Software Foundation (ASF) under one
+# * or more contributor license agreements. See the NOTICE file
+# * distributed with this work for additional information
+# * regarding copyright ownership. The ASF licenses this file
+# * to you under the Apache License, Version 2.0 (the
+# * "License"); you may not use this file except in compliance
+# * with the License. You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+yarn.nodemanager.local-dirs={{nm_local_dirs}}
+yarn.nodemanager.log-dirs={{nm_log_dirs}}
+yarn.nodemanager.linux-container-executor.group={{yarn_executor_container_group}}
+banned.users=hdfs,yarn,mapred,bin
+min.user.id={{min_user_id}}
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/exclude_hosts_list.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/exclude_hosts_list.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/exclude_hosts_list.j2
new file mode 100644
index 0000000..c7ce416
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/exclude_hosts_list.j2
@@ -0,0 +1,21 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+{% for host in exclude_hosts %}
+{{host}}
+{% endfor %}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapreduce.conf.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapreduce.conf.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapreduce.conf.j2
new file mode 100644
index 0000000..b996645
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/mapreduce.conf.j2
@@ -0,0 +1,35 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{{mapred_user}} - nofile 32768
+{{mapred_user}} - nproc 65536
http://git-wip-us.apache.org/repos/asf/ambari/blob/53c39154/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/taskcontroller.cfg.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/taskcontroller.cfg.j2 b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/taskcontroller.cfg.j2
new file mode 100644
index 0000000..3d5f4f2
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/templates/taskcontroller.cfg.j2
@@ -0,0 +1,38 @@
+{#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#}
+
+#/*
+# * Licensed to the Apache Software Foundation (ASF) under one
+# * or more contributor license agreements. See the NOTICE file
+# * distributed with this work for additional information
+# * regarding copyright ownership. The ASF licenses this file
+# * to you under the Apache License, Version 2.0 (the
+# * "License"); you may not use this file except in compliance
+# * with the License. You may obtain a copy of the License at
+# *
+# * http://www.apache.org/licenses/LICENSE-2.0
+# *
+# * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# */
+mapred.local.dir={{mapred_local_dir}}
+mapreduce.tasktracker.group={{mapred_tt_group}}
+hadoop.log.dir={{hdfs_log_dir_prefix}}/{{mapred_user}}