You are viewing a plain text version of this content. The canonical link was part of the original message but is not preserved in this plain-text rendering.
Posted to commits@ambari.apache.org by mp...@apache.org on 2014/08/19 19:18:15 UTC
git commit: AMBARI-6879. Hadoop env generated via tarball on client
config download is incorrect and missing java properties. (mpapirkovskyy)
Repository: ambari
Updated Branches:
refs/heads/trunk 274567507 -> c9fbc849f
AMBARI-6879. Hadoop env generated via tarball on client config download is incorrect and missing java properties. (mpapirkovskyy)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c9fbc849
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c9fbc849
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c9fbc849
Branch: refs/heads/trunk
Commit: c9fbc849f8a617a834d979c66c185cf0c364c694
Parents: 2745675
Author: Myroslav Papirkovskyy <mp...@hortonworks.com>
Authored: Tue Aug 19 20:16:52 2014 +0300
Committer: Myroslav Papirkovskyy <mp...@hortonworks.com>
Committed: Tue Aug 19 20:18:13 2014 +0300
----------------------------------------------------------------------
.../services/HDFS/package/scripts/params.py | 27 ++++++++++++++++++
.../services/HDFS/package/scripts/params.py | 29 ++++++++++++++++++++
2 files changed, 56 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c9fbc849/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
index 82b9cc4..c68c982 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
@@ -150,3 +150,30 @@ if not "com.hadoop.compression.lzo" in io_compression_codecs:
exclude_packages = ["lzo", "hadoop-lzo", "hadoop-lzo-native"]
else:
exclude_packages = []
+
+
+java_home = config['hostLevelParams']['java_home']
+#hadoop params
+
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+
+#hadoop-env.sh
+if System.get_instance().os_family == "suse":
+ jsvc_path = "/usr/lib/bigtop-utils"
+else:
+ jsvc_path = "/usr/libexec/bigtop-utils"
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = default("/configurations/mapred-env/jtnode_opt_newsize","200m")
+jtnode_opt_maxnewsize = default("/configurations/mapred-env/jtnode_opt_maxnewsize","200m")
+jtnode_heapsize = default("/configurations/mapred-env/jtnode_heapsize","1024m")
+ttnode_heapsize = default("/configurations/mapred-env/ttnode_heapsize","1024m")
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+
+mapred_pid_dir_prefix = default("/configurations/hadoop-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/ambari/blob/c9fbc849/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 9a5e393..6ad04b3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -185,3 +185,32 @@ if not "com.hadoop.compression.lzo" in io_compression_codecs:
else:
exclude_packages = []
name_node_params = default("/commandParams/namenode", None)
+
+#hadoop params
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+
+hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+
+#hadoop-env.sh
+java_home = config['hostLevelParams']['java_home']
+
+if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+ # deprecated rhel jsvc_path
+ jsvc_path = "/usr/libexec/bigtop-utils"
+else:
+ jsvc_path = "/usr/lib/bigtop-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize = "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")