Posted to commits@bigtop.apache.org by gu...@apache.org on 2022/07/08 06:46:08 UTC

[bigtop] branch master updated: BIGTOP-3718: Fix MapReduce2 service-check in Ambari Mpack (#932)

This is an automated email from the ASF dual-hosted git repository.

guyuqi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git


The following commit(s) were added to refs/heads/master by this push:
     new 61bfcafd BIGTOP-3718: Fix MapReduce2 service-check in Ambari Mpack (#932)
61bfcafd is described below

commit 61bfcafd62ce12d1f3e94eab88cd9ab22d567d31
Author: Yuqi Gu <yu...@arm.com>
AuthorDate: Fri Jul 8 14:46:02 2022 +0800

    BIGTOP-3718: Fix MapReduce2 service-check in Ambari Mpack (#932)
    
    The patch fixes the MapReduce2 service check and the
    "JAR does not exist" error.
    
    Change-Id: I59ad4d3ecbe88af7bea25bf7177502ebb3e2cf6b
    Signed-off-by: Yuqi Gu <yu...@arm.com>
---
 .../services/YARN/package/scripts/mapred_service_check.py | 15 ++++++++-------
 .../1.0/services/YARN/package/scripts/params_linux.py     | 11 +++++++----
 .../1.0/services/YARN/package/scripts/params_windows.py   |  2 +-
 3 files changed, 16 insertions(+), 12 deletions(-)
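
For context, the reworked service check boils down to three Hadoop operations.
The sketch below is illustrative only and is not part of the commit: the smoke
user and jar directory are assumed example values, while the file names and
subcommands mirror the diff that follows.

    # Illustrative sketch (assumed example values, not the Mpack's params):
    smokeuser = "ambari-qa"                                                   # assumed smoke user
    jar_path = "/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples-3.*.jar"  # assumed jar location
    input_file = "/user/%s/mapredsmokeinput" % smokeuser
    output_file = "/user/%s/mapredsmokeoutput" % smokeuser

    effective_commands = [
        "hadoop fs -put /etc/passwd %s" % input_file,                            # stage the input file
        "hadoop jar %s wordcount %s %s" % (jar_path, input_file, output_file),   # run the example job
        "hadoop fs -test -e %s" % output_file,                                   # verify the output exists
    ]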

diff --git a/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/mapred_service_check.py b/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/mapred_service_check.py
index 6288ac0f..2d47f494 100644
--- a/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/mapred_service_check.py
+++ b/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/mapred_service_check.py
@@ -119,11 +119,18 @@ class MapReduce2ServiceCheckDefault(MapReduce2ServiceCheck):
     env.set_params(params)
 
     jar_path = format("{hadoop_mapred2_jar_location}/{hadoopMapredExamplesJarName}")
+    source_file = format("/etc/passwd")
     input_file = format("/user/{smokeuser}/mapredsmokeinput")
     output_file = format("/user/{smokeuser}/mapredsmokeoutput")
 
-    test_cmd = format("fs -test -e {output_file}")
+    hdfs_put_cmd = format("fs -put {source_file} {input_file}")
     run_wordcount_job = format("jar {jar_path} wordcount {input_file} {output_file}")
+    test_cmd = format("fs -test -e {output_file}")
+
+    ExecuteHadoop(hdfs_put_cmd,
+                  user=params.smokeuser,
+                  bin_dir=params.execute_path,
+                  conf_dir=params.hadoop_conf_dir)
 
     params.HdfsResource(format("/user/{smokeuser}"),
                       type="directory",
@@ -136,12 +143,6 @@ class MapReduce2ServiceCheckDefault(MapReduce2ServiceCheck):
                         type = "directory",
                         dfs_type = params.dfs_type,
     )
-    params.HdfsResource(input_file,
-                        action = "create_on_execute",
-                        type = "file",
-                        source = "/etc/passwd",
-                        dfs_type = params.dfs_type,
-    )
     params.HdfsResource(None, action="execute")
 
     # initialize the ticket
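
Side note: ExecuteHadoop, used above, is the resource from Ambari's
resource_management library that runs the given subcommand through the hadoop
CLI. A minimal standalone sketch of the call pattern added by this patch is
below; the user, bin_dir and conf_dir values are assumed placeholders for what
params_linux.py normally supplies.

    # Minimal sketch with assumed placeholder values (not the Mpack's params):
    from resource_management.core.environment import Environment
    from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop

    with Environment():                              # Ambari resources run inside an Environment
        ExecuteHadoop("fs -put /etc/passwd /user/ambari-qa/mapredsmokeinput",
                      user="ambari-qa",              # assumed smoke user
                      bin_dir="/usr/bin",            # assumed execute path
                      conf_dir="/etc/hadoop/conf")   # assumed Hadoop conf dir
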
diff --git a/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/params_linux.py b/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/params_linux.py
index 5b4177c9..d5859fd7 100644
--- a/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/params_linux.py
+++ b/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/params_linux.py
@@ -127,8 +127,11 @@ def get_spark_version(service_name, component_name, yarn_version):
 # these are used to render the classpath for picking up Spark classes
 # in the event that spark is not installed, then we must default to the version of YARN installed
 # since it will still load classes from its own spark version
-spark_version = get_spark_version("SPARK", "SPARK_CLIENT", version)
-spark2_version = get_spark_version("SPARK2", "SPARK2_CLIENT", version)
+
+# No Spark services in current Mpack;
+# TODO: Add Spark into stack;
+#spark_version = get_spark_version("SPARK", "SPARK_CLIENT", version)
+#spark2_version = get_spark_version("SPARK2", "SPARK2_CLIENT", version)
 
 stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
 stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
@@ -294,8 +297,8 @@ nm_log_dirs_list = nm_log_dirs.split(',')
 nm_log_dir_to_mount_file = "/var/lib/ambari-agent/data/yarn/yarn_log_dir_mount.hist"
 nm_local_dir_to_mount_file = "/var/lib/ambari-agent/data/yarn/yarn_local_dir_mount.hist"
 
-distrAppJarName = "hadoop-yarn-applications-distributedshell-2.*.jar"
-hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"
+distrAppJarName = "hadoop-yarn-applications-distributedshell-3.*.jar"
+hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-3.*.jar"
 
 entity_file_history_directory = "/tmp/entity-file-history/active"
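
The jar-name bump above is the "JAR does not exist" half of the fix: the BGTP
stack ships Hadoop 3, so the old hadoop-mapreduce-examples-2.*.jar glob no
longer matches anything. A standalone sanity check along these lines can
confirm that the new pattern resolves on a node; the install path below is an
assumed example, not a value read from the Mpack.

    # Standalone sanity check (assumed install path, not the Mpack's params):
    import glob
    import os

    hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"          # assumed location
    hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-3.*.jar"  # value from this patch

    matches = glob.glob(os.path.join(hadoop_mapred2_jar_location, hadoopMapredExamplesJarName))
    if matches:
        print("Resolved examples jar: %s" % matches[0])
    else:
        print("No jar matches %s" % hadoopMapredExamplesJarName)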
 
diff --git a/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/params_windows.py b/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/params_windows.py
index 1e6d3cf2..549872c7 100644
--- a/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/params_windows.py
+++ b/bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/params_windows.py
@@ -55,7 +55,7 @@ hs_port = config['configurations']['mapred-site']['mapreduce.jobhistory.webapp.a
 hs_webui_address = format("{hs_host}:{hs_port}")
 
 hadoop_mapred2_jar_location = os.path.join(os.environ["HADOOP_COMMON_HOME"], "share", "hadoop", "mapreduce")
-hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"
+hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-3.*.jar"
 
 exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
 exclude_file_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.exclude-path","/etc/hadoop/conf/yarn.exclude")