You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by al...@apache.org on 2015/08/12 22:10:02 UTC
ambari git commit: AMBARI-12349. Datanode failed to start when using a
non-default dfs.datanode.data.dir.mount.file or net.topology.script.file.name
(Di Li via alejandro)
Repository: ambari
Updated Branches:
refs/heads/trunk 9e2e2b0d7 -> f04e4944d
AMBARI-12349. Datanode failed to start when using a non-default dfs.datanode.data.dir.mount.file or net.topology.script.file.name (Di Li via alejandro)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f04e4944
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f04e4944
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f04e4944
Branch: refs/heads/trunk
Commit: f04e4944d3de22037ab04767be7da1d7678a25f6
Parents: 9e2e2b0
Author: Alejandro Fernandez <af...@hortonworks.com>
Authored: Wed Aug 12 13:08:23 2015 -0700
Committer: Alejandro Fernandez <af...@hortonworks.com>
Committed: Wed Aug 12 13:08:23 2015 -0700
----------------------------------------------------------------------
.../before-START/scripts/rack_awareness.py | 29 ++++++-
.../hooks/before-START/test_before_start.py | 81 ++++++++++++++++++++
2 files changed, 106 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/f04e4944/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py
index 0b18ecb..453de63 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/rack_awareness.py
@@ -17,15 +17,26 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
-from resource_management.core.resources import File
+from resource_management.core.resources import File,Directory
from resource_management.core.source import StaticFile, Template
from resource_management.libraries.functions import format
+import os
+
def create_topology_mapping():
import params
+ path=params.net_topology_mapping_data_file_path
+ parent_dir=os.path.dirname(path)
+ # only create the parent directory and set its permission if it does not exist
+ if not os.path.exists(parent_dir):
+ Directory(parent_dir,
+ recursive=True,
+ owner=params.hdfs_user,
+ group=params.user_group)
+
# placing the mappings file in the same folder where the topology script is located
- File(params.net_topology_mapping_data_file_path,
+ File(path,
content=Template("topology_mappings.data.j2"),
owner=params.hdfs_user,
group=params.user_group,
@@ -35,8 +46,18 @@ def create_topology_mapping():
def create_topology_script():
import params
+
+ path=params.net_topology_script_file_path
+ parent_dir=os.path.dirname(path)
+ # only create the parent directory and set its permission if it does not exist
+ if not os.path.exists(parent_dir):
+ Directory(parent_dir,
+ recursive=True,
+ owner=params.hdfs_user,
+ group=params.user_group)
+
# installing the topology script to the specified location
- File(params.net_topology_script_file_path,
+ File(path,
content=StaticFile('topology_script.py'),
mode=0755,
only_if=format("test -d {net_topology_script_dir}"),
@@ -46,4 +67,4 @@ def create_topology_script_and_mapping():
import params
if params.has_hadoop_env:
create_topology_mapping()
- create_topology_script()
\ No newline at end of file
+ create_topology_script()
http://git-wip-us.apache.org/repos/asf/ambari/blob/f04e4944/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
index 00022ae..504fd04 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-START/test_before_start.py
@@ -251,6 +251,87 @@ class TestHookBeforeStart(RMFTestCase):
)
self.assertNoMoreResources()
+ def test_hook_refresh_topology_custom_directories(self):
+ config_file = "stacks/2.0.6/configs/default.json"
+ with open(config_file, "r") as f:
+ default_json = json.load(f)
+
+ default_json['serviceName'] = 'HDFS'
+ default_json['configurations']['core-site']['net.topology.script.file.name'] = '/home/myhadoop/hadoop/conf.hadoop/topology_script.py'
+
+ self.executeScript("2.0.6/hooks/before-START/scripts/hook.py",
+ classname="BeforeStartHook",
+ command="hook",
+ config_dict=default_json
+ )
+ self.assertResourceCalled('Execute', ('setenforce', '0'),
+ only_if = 'test -f /selinux/enforce',
+ not_if = "(! which getenforce ) || (which getenforce && getenforce | grep -q Disabled)",
+ sudo=True,
+ )
+ self.assertResourceCalled('Directory', '/var/log/hadoop',
+ owner = 'root',
+ group = 'hadoop',
+ mode = 0775,
+ recursive = True,
+ cd_access = 'a',
+ )
+ self.assertResourceCalled('Directory', '/var/run/hadoop',
+ owner = 'root',
+ group = 'root',
+ recursive = True,
+ cd_access = 'a',
+ )
+ self.assertResourceCalled('Directory', '/tmp/hadoop-hdfs',
+ owner = 'hdfs',
+ recursive = True,
+ cd_access = 'a',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/commons-logging.properties',
+ content = Template('commons-logging.properties.j2'),
+ owner = 'hdfs',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/health_check',
+ content = Template('health_check.j2'),
+ owner = 'hdfs',
+ )
+ self.assertResourceCalled('File',
+ '/etc/hadoop/conf/log4j.properties',
+ mode=0644,
+ group='hadoop',
+ owner='hdfs',
+ content='log4jproperties\nline2log4jproperties\nline2'
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/hadoop-metrics2.properties',
+ content = Template('hadoop-metrics2.properties.j2'),
+ owner = 'hdfs',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/task-log4j.properties',
+ content = StaticFile('task-log4j.properties'),
+ mode = 0755,
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/configuration.xsl',
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/masters',
+ owner = 'hdfs',
+ group = 'hadoop',
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/topology_mappings.data',
+ owner = 'hdfs',
+ content = Template('topology_mappings.data.j2'),
+ group = 'hadoop',
+ only_if = 'test -d /etc/hadoop/conf'
+ )
+ self.assertResourceCalled('File', '/etc/hadoop/conf/topology_script.py',
+ content = StaticFile('topology_script.py'),
+ mode = 0755,
+ only_if = 'test -d /etc/hadoop/conf'
+ )
+ self.assertNoMoreResources()
+
+
def test_that_jce_is_required_in_secured_cluster(self):
try:
self.executeScript("2.0.6/hooks/before-START/scripts/hook.py",