You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by vb...@apache.org on 2015/03/25 19:06:35 UTC
ambari git commit: AMBARI-9997. Add Service Check to Tez Service. (vbrodetskyi)
Repository: ambari
Updated Branches:
refs/heads/trunk ce6d7454f -> 271660a6d
AMBARI-9997. Add Service Check to Tez Service.(vbrodetskyi)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/271660a6
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/271660a6
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/271660a6
Branch: refs/heads/trunk
Commit: 271660a6d0520e836d6613a5972b659cbdebd13e
Parents: ce6d745
Author: Vitaly Brodetskyi <vb...@hortonworks.com>
Authored: Wed Mar 25 19:51:44 2015 +0200
Committer: Vitaly Brodetskyi <vb...@hortonworks.com>
Committed: Wed Mar 25 19:51:44 2015 +0200
----------------------------------------------------------------------
.../libraries/functions/__init__.py | 1 +
.../libraries/functions/get_hdp_version.py | 53 +++++++++
.../0.12.0.2.0/package/scripts/hive_server.py | 5 -
.../0.12.0.2.0/package/scripts/install_jars.py | 107 -------------------
.../1.0.0.2.3/package/scripts/service_check.py | 4 +-
.../common-services/TEZ/0.4.0.2.1/metainfo.xml | 8 +-
.../TEZ/0.4.0.2.1/package/scripts/params.py | 6 ++
.../0.4.0.2.1/package/scripts/service_check.py | 98 +++++++++++++++++
.../2.1.0.2.0/package/scripts/install_jars.py | 107 +++++++++++++++++++
.../YARN/2.1.0.2.0/package/scripts/params.py | 11 ++
.../package/scripts/resourcemanager.py | 8 +-
.../stacks/HDP/2.1/role_command_order.json | 3 +-
.../stacks/2.0.6/HIVE/test_hive_server.py | 66 ++----------
.../stacks/2.0.6/YARN/test_resourcemanager.py | 23 ++--
.../python/stacks/2.1/TEZ/test_service_check.py | 81 ++++++++++++++
.../python/stacks/2.1/TEZ/test_tez_client.py | 5 +-
16 files changed, 398 insertions(+), 188 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
index e5d50a9..73607e1 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
@@ -38,6 +38,7 @@ from resource_management.libraries.functions.hive_check import *
from resource_management.libraries.functions.version import *
from resource_management.libraries.functions.format_jvm_option import *
from resource_management.libraries.functions.constants import *
+from resource_management.libraries.functions.get_hdp_version import *
IS_WINDOWS = platform.system() == "Windows"
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py b/ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py
new file mode 100644
index 0000000..ed34c02
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+__all__ = ["get_hdp_version"]
+import os
+import re
+from resource_management.core.logger import Logger
+from resource_management.core.exceptions import Fail
+from resource_management.core import shell
+
+
+def get_hdp_version(package_name):
+ """
+ @param package_name, name of the package, from which, function will try to get hdp version
+ """
+ try:
+ command = 'hdp-select status ' + package_name
+ return_code, hdp_output = shell.call(command, timeout=20)
+ except Exception, e:
+ Logger.error(str(e))
+ raise Fail('Unable to execute hdp-select command to retrieve the version.')
+
+ if return_code != 0:
+ raise Fail(
+ 'Unable to determine the current version because of a non-zero return code of {0}'.format(str(return_code)))
+
+ hdp_version = re.sub(package_name + ' - ', '', hdp_output)
+ hdp_version = hdp_version.rstrip()
+ match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version)
+
+ if match is None:
+ raise Fail('Failed to get extracted version')
+
+ return hdp_version
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index abfde14..0e03a22 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -26,7 +26,6 @@ from resource_management.libraries.functions.dynamic_variable_interpretation imp
from resource_management.libraries.functions.security_commons import build_expectations, \
cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
FILE_TYPE_XML
-from install_jars import install_tez_jars
from setup_ranger_hive import setup_ranger_hive
class HiveServer(Script):
@@ -42,9 +41,6 @@ class HiveServer(Script):
def configure(self, env):
import params
env.set_params(params)
- if not (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >=0):
- install_tez_jars()
-
hive(name='hiveserver2')
@@ -55,7 +51,6 @@ class HiveServer(Script):
# This function is needed in HDP 2.2, but it is safe to call in earlier versions.
copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
- copy_tarballs_to_hdfs('tez', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
setup_ranger_hive()
hive_service( 'hiveserver2', action = 'start',
rolling_restart=rolling_restart )
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
deleted file mode 100644
index a18ca72..0000000
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
+++ /dev/null
@@ -1,107 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements. See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership. The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-"""
-
-from resource_management import *
-import os
-
-def install_tez_jars():
- import params
-
- destination_hdfs_dirs = get_tez_hdfs_dir_paths(params.tez_lib_uris)
-
- # If tez libraries are to be stored in hdfs
- if destination_hdfs_dirs:
- for hdfs_dir in destination_hdfs_dirs:
- params.HdfsDirectory(hdfs_dir,
- action="create_delayed",
- owner=params.tez_user,
- mode=0755
- )
- pass
- params.HdfsDirectory(None, action="create")
-
- if params.security_enabled:
- kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
- else:
- kinit_if_needed = ""
-
- if kinit_if_needed:
- Execute(kinit_if_needed,
- user=params.tez_user,
- path='/bin'
- )
- pass
-
- app_dir_path = None
- lib_dir_path = None
-
- if len(destination_hdfs_dirs) > 0:
- for path in destination_hdfs_dirs:
- if 'lib' in path:
- lib_dir_path = path
- else:
- app_dir_path = path
- pass
- pass
- pass
-
- if app_dir_path:
- for scr_file, dest_file in params.app_dir_files.iteritems():
- CopyFromLocal(scr_file,
- mode=0755,
- owner=params.tez_user,
- dest_dir=app_dir_path,
- dest_file=dest_file,
- kinnit_if_needed=kinit_if_needed,
- hdfs_user=params.hdfs_user,
- hadoop_bin_dir=params.hadoop_bin_dir,
- hadoop_conf_dir=params.hadoop_conf_dir
- )
-
- if lib_dir_path:
- CopyFromLocal(params.tez_local_lib_jars,
- mode=0755,
- owner=params.tez_user,
- dest_dir=lib_dir_path,
- kinnit_if_needed=kinit_if_needed,
- hdfs_user=params.hdfs_user,
- hadoop_bin_dir=params.hadoop_bin_dir,
- hadoop_conf_dir=params.hadoop_conf_dir
- )
- pass
-
-
-def get_tez_hdfs_dir_paths(tez_lib_uris = None):
- hdfs_path_prefix = 'hdfs://'
- lib_dir_paths = []
- if tez_lib_uris and tez_lib_uris.strip().find(hdfs_path_prefix, 0) != -1:
- dir_paths = tez_lib_uris.split(',')
- for path in dir_paths:
- if not "tez.tar.gz" in path:
- lib_dir_path = path.replace(hdfs_path_prefix, '')
- lib_dir_path = lib_dir_path if lib_dir_path.endswith(os.sep) else lib_dir_path + os.sep
- lib_dir_paths.append(lib_dir_path)
- else:
- lib_dir_path = path.replace(hdfs_path_prefix, '')
- lib_dir_paths.append(os.path.dirname(lib_dir_path))
- pass
- pass
-
- return lib_dir_paths
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
index e01cf47..f46c41f 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
@@ -28,7 +28,7 @@ class MahoutServiceCheck(Script):
env.set_params(params)
create_input_dir_cmd = format("fs -mkdir /user/{smokeuser}/mahoutsmokeinput")
- copy_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-mahout-test.txt /user/{smokeuser}/mahoutsmokeinput/")
+ copy_test_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-mahout-test.txt /user/{smokeuser}/mahoutsmokeinput/")
mahout_command = format("mahout seqdirectory --input /user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt "
"--output /user/{smokeuser}/mahoutsmokeoutput/ --charset utf-8")
test_command = format("fs -test -e /user/{smokeuser}/mahoutsmokeoutput/_SUCCESS")
@@ -61,7 +61,7 @@ class MahoutServiceCheck(Script):
mode = 0755
)
- ExecuteHadoop( copy_file_to_hdfs_cmd,
+ ExecuteHadoop( copy_test_file_to_hdfs_cmd,
tries = 3,
try_sleep = 5,
user = params.smokeuser,
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/metainfo.xml b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/metainfo.xml
index 58c5d39..f42af02 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/metainfo.xml
@@ -59,7 +59,13 @@
</packages>
</osSpecific>
</osSpecifics>
-
+
+ <commandScript>
+ <script>scripts/service_check.py</script>
+ <scriptType>PYTHON</scriptType>
+ <timeout>300</timeout>
+ </commandScript>
+
<requiredServices>
<service>YARN</service>
</requiredServices>
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
index 5e6d254..b58bcd6 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
@@ -23,6 +23,7 @@ from resource_management import *
# server configurations
config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
stack_name = default("/hostLevelParams/stack_name", None)
@@ -35,12 +36,17 @@ version = default("/commandParams/version", None)
if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+ path_to_tez_examples_jar = "/usr/hdp/{hdp_version}/tez/tez-examples*.jar"
else:
hadoop_bin_dir = "/usr/bin"
+ path_to_tez_examples_jar = "/usr/lib/tez/tez-mapreduce-examples*.jar"
hadoop_conf_dir = "/etc/hadoop/conf"
kinit_path_local = functions.get_kinit_path()
security_enabled = config['configurations']['cluster-env']['security_enabled']
+smokeuser = config['configurations']['cluster-env']['smokeuser']
+smokeuser_principal = config['configurations']['cluster-env']['smokeuser_principal_name']
+smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
new file mode 100644
index 0000000..434b505
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
@@ -0,0 +1,98 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
+
+class TezServiceCheck(Script):
+ def service_check(self, env):
+ import params
+ env.set_params(params)
+
+ if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+ hdp_version = functions.get_hdp_version("hadoop-client")
+
+ path_to_tez_jar = format(params.path_to_tez_examples_jar)
+ copy_test_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-tez-test /tmp/tezsmokeinput/")
+ create_input_dir_cmd = format("fs -mkdir /tmp/tezsmokeinput")
+ wordcount_command = format("jar {path_to_tez_jar} orderedwordcount "
+ "/tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/")
+ test_command = format("fs -test -e /tmp/tezsmokeoutput/_SUCCESS")
+ remove_output_input_dirs_cmd = "fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput"
+
+
+ ExecuteHadoop( remove_output_input_dirs_cmd,
+ tries = 3,
+ try_sleep = 5,
+ user = params.smokeuser,
+ conf_dir = params.hadoop_conf_dir,
+ # for kinit run
+ keytab = params.smoke_user_keytab,
+ principal = params.smokeuser_principal,
+ security_enabled = params.security_enabled,
+ kinit_path_local = params.kinit_path_local,
+ bin_dir = params.hadoop_bin_dir
+ )
+
+ ExecuteHadoop( create_input_dir_cmd,
+ tries = 3,
+ try_sleep = 5,
+ user = params.smokeuser,
+ conf_dir = params.hadoop_conf_dir,
+ bin_dir = params.hadoop_bin_dir
+ )
+
+ File( format("{tmp_dir}/sample-tez-test"),
+ content = "foo\nbar\nfoo\nbar\nfoo",
+ mode = 0755
+ )
+
+ ExecuteHadoop( copy_test_file_to_hdfs_cmd,
+ tries = 3,
+ try_sleep = 5,
+ user = params.smokeuser,
+ conf_dir = params.hadoop_conf_dir,
+ bin_dir = params.hadoop_bin_dir
+ )
+
+ ExecuteHadoop( wordcount_command,
+ tries = 3,
+ try_sleep = 5,
+ user = params.smokeuser,
+ conf_dir = params.hadoop_conf_dir,
+ bin_dir = params.hadoop_bin_dir
+ )
+
+ ExecuteHadoop( test_command,
+ tries = 10,
+ try_sleep = 6,
+ user = params.smokeuser,
+ conf_dir = params.hadoop_conf_dir,
+ bin_dir = params.hadoop_bin_dir
+ )
+
+
+
+
+if __name__ == "__main__":
+ TezServiceCheck().execute()
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
new file mode 100644
index 0000000..a18ca72
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import os
+
+def install_tez_jars():
+ import params
+
+ destination_hdfs_dirs = get_tez_hdfs_dir_paths(params.tez_lib_uris)
+
+ # If tez libraries are to be stored in hdfs
+ if destination_hdfs_dirs:
+ for hdfs_dir in destination_hdfs_dirs:
+ params.HdfsDirectory(hdfs_dir,
+ action="create_delayed",
+ owner=params.tez_user,
+ mode=0755
+ )
+ pass
+ params.HdfsDirectory(None, action="create")
+
+ if params.security_enabled:
+ kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
+ else:
+ kinit_if_needed = ""
+
+ if kinit_if_needed:
+ Execute(kinit_if_needed,
+ user=params.tez_user,
+ path='/bin'
+ )
+ pass
+
+ app_dir_path = None
+ lib_dir_path = None
+
+ if len(destination_hdfs_dirs) > 0:
+ for path in destination_hdfs_dirs:
+ if 'lib' in path:
+ lib_dir_path = path
+ else:
+ app_dir_path = path
+ pass
+ pass
+ pass
+
+ if app_dir_path:
+ for scr_file, dest_file in params.app_dir_files.iteritems():
+ CopyFromLocal(scr_file,
+ mode=0755,
+ owner=params.tez_user,
+ dest_dir=app_dir_path,
+ dest_file=dest_file,
+ kinnit_if_needed=kinit_if_needed,
+ hdfs_user=params.hdfs_user,
+ hadoop_bin_dir=params.hadoop_bin_dir,
+ hadoop_conf_dir=params.hadoop_conf_dir
+ )
+
+ if lib_dir_path:
+ CopyFromLocal(params.tez_local_lib_jars,
+ mode=0755,
+ owner=params.tez_user,
+ dest_dir=lib_dir_path,
+ kinnit_if_needed=kinit_if_needed,
+ hdfs_user=params.hdfs_user,
+ hadoop_bin_dir=params.hadoop_bin_dir,
+ hadoop_conf_dir=params.hadoop_conf_dir
+ )
+ pass
+
+
+def get_tez_hdfs_dir_paths(tez_lib_uris = None):
+ hdfs_path_prefix = 'hdfs://'
+ lib_dir_paths = []
+ if tez_lib_uris and tez_lib_uris.strip().find(hdfs_path_prefix, 0) != -1:
+ dir_paths = tez_lib_uris.split(',')
+ for path in dir_paths:
+ if not "tez.tar.gz" in path:
+ lib_dir_path = path.replace(hdfs_path_prefix, '')
+ lib_dir_path = lib_dir_path if lib_dir_path.endswith(os.sep) else lib_dir_path + os.sep
+ lib_dir_paths.append(lib_dir_path)
+ else:
+ lib_dir_path = path.replace(hdfs_path_prefix, '')
+ lib_dir_paths.append(os.path.dirname(lib_dir_path))
+ pass
+ pass
+
+ return lib_dir_paths
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
index 9a11fdb..7263b59 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params.py
@@ -186,6 +186,17 @@ mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-si
mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']
jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize", "900")
+# Tez-related properties
+tez_user = config['configurations']['tez-env']['tez_user']
+
+# Tez jars
+tez_local_api_jars = '/usr/lib/tez/tez*.jar'
+tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+app_dir_files = {tez_local_api_jars:None}
+
+# Tez libraries
+tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
+
#for create_hdfs_directory
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
index e5a26e7..e0f3b65 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
@@ -21,10 +21,11 @@ Ambari Agent
from resource_management import *
from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
from resource_management.libraries.functions.security_commons import build_expectations, \
cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
FILE_TYPE_XML
-
+from install_jars import install_tez_jars
from yarn import yarn
from service import service
@@ -56,6 +57,11 @@ class Resourcemanager(Script):
env.set_params(params)
self.configure(env) # FOR SECURITY
+ if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.1') == 0:
+ install_tez_jars()
+ else:
+ # will work only for stack versions >=2.2
+ copy_tarballs_to_hdfs('tez', 'hadoop-yarn-resourcemanager', params.tez_user, params.hdfs_user, params.user_group)
service('resourcemanager',
action='start'
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json b/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
index 50505c9..ec38ee2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1/role_command_order.json
@@ -10,7 +10,8 @@
"STORM_REST_API-START" : ["NIMBUS-START", "STORM_UI_SERVER-START", "SUPERVISOR-START", "DRPC_SERVER-START"],
"STORM_SERVICE_CHECK-SERVICE_CHECK": ["NIMBUS-START", "SUPERVISOR-START", "STORM_UI_SERVER-START",
"DRPC_SERVER-START"],
- "NIMBUS-STOP" : ["SUPERVISOR-STOP", "STORM_UI_SERVER-STOP", "DRPC_SERVER-STOP"]
+ "NIMBUS-STOP" : ["SUPERVISOR-STOP", "STORM_UI_SERVER-STOP", "DRPC_SERVER-STOP"],
+ "TEZ_SERVICE_CHECK-SERVICE_CHECK": ["NODEMANAGER-START", "RESOURCEMANAGER-START"]
},
"_comment" : "Dependencies that are used when GLUSTERFS is not present in cluster",
"optional_no_glusterfs": {
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
index ab56a56..24150a8 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_server.py
@@ -57,7 +57,6 @@ class TestHiveServer(RMFTestCase):
)
self.assert_configure_default()
- self.printResources()
self.assertResourceCalled('Execute', 'hive --config /etc/hive/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020/apps/hive/warehouse ',
environment = {'PATH' : "/bin:/usr/lib/hive/bin:/usr/bin"},
user = 'hive',
@@ -183,56 +182,6 @@ class TestHiveServer(RMFTestCase):
self.assertFalse(socket_mock.called)
def assert_configure_default(self):
- self.assertResourceCalled('HdfsDirectory', '/apps/tez/',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0755,
- owner = 'tez',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', '/apps/tez/lib/',
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- mode = 0755,
- owner = 'tez',
- bin_dir = '/usr/bin',
- action = ['create_delayed'],
- )
- self.assertResourceCalled('HdfsDirectory', None,
- security_enabled = False,
- keytab = UnknownConfigurationMock(),
- conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- kinit_path_local = '/usr/bin/kinit',
- action = ['create'],
- bin_dir = '/usr/bin',
- )
- self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/tez*.jar',
- hadoop_bin_dir = '/usr/bin',
- hdfs_user = 'hdfs',
- owner = 'tez',
- dest_file = None,
- kinnit_if_needed = '',
- dest_dir = '/apps/tez/',
- hadoop_conf_dir = '/etc/hadoop/conf',
- mode = 0755,
- )
- self.assertResourceCalled('CopyFromLocal', '/usr/lib/tez/lib/*.jar',
- hadoop_conf_dir = '/etc/hadoop/conf',
- hdfs_user = 'hdfs',
- owner = 'tez',
- kinnit_if_needed = '',
- dest_dir = '/apps/tez/lib/',
- hadoop_bin_dir = '/usr/bin',
- mode = 0755,
- )
self.assertResourceCalled('HdfsDirectory', '/apps/hive/warehouse',
security_enabled = False,
keytab = UnknownConfigurationMock(),
@@ -335,33 +284,34 @@ class TestHiveServer(RMFTestCase):
sudo = True,
)
self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
- mode = 0644,
- )
+ mode = 0644,
+ )
self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
- content = DownloadSource('http://c6401.ambari.apache.org:8080/resources/DBConnectionVerification.jar'),
- )
+ content = DownloadSource('http://c6401.ambari.apache.org:8080/resources'
+ '/DBConnectionVerification.jar'),
+ )
self.assertResourceCalled('File', '/tmp/start_hiveserver2_script',
content = Template('startHiveserver2.sh.j2'),
mode = 0755,
)
self.assertResourceCalled('Directory', '/var/run/hive',
owner = 'hive',
- group = 'hadoop',
mode = 0755,
+ group = 'hadoop',
recursive = True,
cd_access = 'a',
)
self.assertResourceCalled('Directory', '/var/log/hive',
owner = 'hive',
- group = 'hadoop',
mode = 0755,
+ group = 'hadoop',
recursive = True,
cd_access = 'a',
)
self.assertResourceCalled('Directory', '/var/lib/hive',
owner = 'hive',
- group = 'hadoop',
mode = 0755,
+ group = 'hadoop',
recursive = True,
cd_access = 'a',
)
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index d32fc1a..184a210 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -51,18 +51,19 @@ class TestResourceManager(RMFTestCase):
self.assert_configure_default()
- pid_check_cmd = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1'
-
self.assertResourceCalled('File', '/var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid',
- not_if=pid_check_cmd,
- action=['delete'])
+ action = ['delete'],
+ not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+ )
self.assertResourceCalled('Execute', 'ulimit -c unlimited; export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec && /usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config /etc/hadoop/conf start resourcemanager',
- not_if=pid_check_cmd,
- user='yarn')
- self.assertResourceCalled('Execute', pid_check_cmd,
- user='yarn',
- not_if=pid_check_cmd,
- initial_wait=5)
+ not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+ user = 'yarn',
+ )
+ self.assertResourceCalled('Execute', 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+ initial_wait = 5,
+ not_if = 'ls /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop-yarn/yarn/yarn-yarn-resourcemanager.pid` >/dev/null 2>&1',
+ user = 'yarn',
+ )
self.assertNoMoreResources()
def test_stop_default(self):
@@ -571,4 +572,4 @@ class TestResourceManager(RMFTestCase):
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES
)
- put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
\ No newline at end of file
+ put_structured_out_mock.assert_called_with({"securityState": "UNSECURED"})
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
new file mode 100644
index 0000000..cf240b0
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.1/TEZ/test_service_check.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from stacks.utils.RMFTestCase import *
+
+
+class TestTezServiceCheck(RMFTestCase):
+ COMMON_SERVICES_PACKAGE_DIR = "TEZ/0.4.0.2.1/package"
+ STACK_VERSION = "2.1"
+
+ def test_service_check(self):
+ self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/service_check.py",
+ classname="TezServiceCheck",
+ command="service_check",
+ config_file="default.json",
+ hdp_stack_version = self.STACK_VERSION,
+ target = RMFTestCase.TARGET_COMMON_SERVICES
+ )
+ self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput',
+ security_enabled = False,
+ keytab = UnknownConfigurationMock(),
+ conf_dir = '/etc/hadoop/conf',
+ try_sleep = 5,
+ kinit_path_local = '/usr/bin/kinit',
+ tries = 3,
+ user = 'ambari-qa',
+ bin_dir = '/usr/bin',
+ principal = UnknownConfigurationMock(),
+ )
+ self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp/tezsmokeinput',
+ try_sleep = 5,
+ tries = 3,
+ bin_dir = '/usr/bin',
+ user = 'ambari-qa',
+ conf_dir = '/etc/hadoop/conf',
+ )
+ self.assertResourceCalled('File', '/tmp/sample-tez-test',
+ content = 'foo\nbar\nfoo\nbar\nfoo',
+ mode = 0755,
+ )
+ self.assertResourceCalled('ExecuteHadoop', 'fs -put /tmp/sample-tez-test /tmp/tezsmokeinput/',
+ try_sleep = 5,
+ tries = 3,
+ bin_dir = '/usr/bin',
+ user = 'ambari-qa',
+ conf_dir = '/etc/hadoop/conf',
+ )
+ self.assertResourceCalled('ExecuteHadoop', 'jar /usr/lib/tez/tez-mapreduce-examples*.jar orderedwordcount /tmp/tezsmokeinput/sample-tez-test /tmp/tezsmokeoutput/',
+ try_sleep = 5,
+ tries = 3,
+ bin_dir = '/usr/bin',
+ user = 'ambari-qa',
+ conf_dir = '/etc/hadoop/conf',
+ )
+ self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/tezsmokeoutput/_SUCCESS',
+ try_sleep = 6,
+ tries = 10,
+ bin_dir = '/usr/bin',
+ user = 'ambari-qa',
+ conf_dir = '/etc/hadoop/conf',
+ )
+ self.assertNoMoreResources()
+
+
http://git-wip-us.apache.org/repos/asf/ambari/blob/271660a6/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
index 3d74113..74273a7 100644
--- a/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
+++ b/ambari-server/src/test/python/stacks/2.1/TEZ/test_tez_client.py
@@ -60,8 +60,8 @@ class TestTezClient(RMFTestCase):
self.assertNoMoreResources()
-
- def test_upgrade(self):
+ @patch("resource_management.libraries.functions.get_hdp_version", return_value="2.2.1.0-2067")
+ def test_upgrade(self, get_hdp_version_mock):
self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/tez_client.py",
classname = "TezClient",
command = "restart",
@@ -69,6 +69,7 @@ class TestTezClient(RMFTestCase):
hdp_stack_version = self.STACK_VERSION,
target = RMFTestCase.TARGET_COMMON_SERVICES)
+ get_hdp_version_mock.return_value = "2.2.1.0-2067"
self.assertResourceCalled("Execute", "hdp-select set hadoop-client 2.2.1.0-2067")
# for now, it's enough that hdp-select is confirmed