Posted to commits@ambari.apache.org by ao...@apache.org on 2014/02/11 18:41:49 UTC

[3/3] git commit: AMBARI-4583. Extract common HDFS directory creation code shared across services (Eugene Chekanskiy via aonishuk)

AMBARI-4583. Extract common HDFS directory creation code shared across services
(Eugene Chekanskiy via aonishuk)
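
For readers skimming the diff below: the refactoring replaces the per-call
hdfs_directory(...) helper with a batched HdfsDirectory resource, so that
(per the commit title) each service can create its own HDFS directories.
Directories are queued with action="create_delayed" and a single final
action="create" call realizes them; in hdfs_namenode.py that final call is
additionally guarded by only_if so it is skipped on a standby NameNode in
HA setups. A minimal sketch of the pattern as a service script might use
it; the paths here are illustrative, not taken from this commit:

    import params  # the service's params module, as in the scripts below

    # Queue directories; nothing touches HDFS yet.
    params.HdfsDirectory("/apps/example",        # hypothetical path
                         action="create_delayed",
                         owner=params.hdfs_user,
                         mode=0777)
    params.HdfsDirectory("/user/example",        # hypothetical path
                         action="create_delayed",
                         owner=params.hdfs_user)
    # Realize everything queued above, presumably in a single pass.
    params.HdfsDirectory(None, action="create")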


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e66d84f8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e66d84f8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e66d84f8

Branch: refs/heads/trunk
Commit: e66d84f8a8fb82bb31cc6f7b220a422443541dfe
Parents: 7f834d4
Author: Andrew Onischuk <ao...@hortonworks.com>
Authored: Tue Feb 11 09:40:13 2014 -0800
Committer: Andrew Onischuk <ao...@hortonworks.com>
Committed: Tue Feb 11 09:40:13 2014 -0800

----------------------------------------------------------------------
 .../python/resource_management/TestScript.py    |   6 +-
 .../HDFS/package/scripts/hdfs_namenode.py       | 137 +++++--------------
 .../stacks/2.1.1/STORM/test_service_check.py    |   1 -
 3 files changed, 35 insertions(+), 109 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e66d84f8/ambari-agent/src/test/python/resource_management/TestScript.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestScript.py b/ambari-agent/src/test/python/resource_management/TestScript.py
index 5b34fae..23bbef1 100644
--- a/ambari-agent/src/test/python/resource_management/TestScript.py
+++ b/ambari-agent/src/test/python/resource_management/TestScript.py
@@ -73,7 +73,7 @@ class TestScript(TestCase):
       Script.config = no_packages_config
       script.install_packages(env)
     resource_dump = pprint.pformat(env.resource_list)
-    self.assertEquals(resource_dump, "[Repository['HDP-2.0._'], Repository['HDP-2.0._']]")
+    self.assertEquals(resource_dump, "[Repository['HDP-2.0._']]")
 
     # Testing empty package list
     with Environment(".", test_mode=True) as env:
@@ -81,14 +81,14 @@ class TestScript(TestCase):
       Script.config = empty_config
       script.install_packages(env)
     resource_dump = pprint.pformat(env.resource_list)
-    self.assertEquals(resource_dump, "[Repository['HDP-2.0._'], Repository['HDP-2.0._']]")
+    self.assertEquals(resource_dump, "[Repository['HDP-2.0._']]")
 
     # Testing installing of a list of packages
     with Environment(".", test_mode=True) as env:
       Script.config = dummy_config
       script.install_packages("env")
     resource_dump = pprint.pformat(env.resource_list)
-    self.assertEqual(resource_dump, "[Repository['HDP-2.0._'],\n Repository['HDP-2.0._'],\n Package['hbase'],\n Package['yet-another-package'],\n Repository['HDP-2.0._'],\n Repository['HDP-2.0._']]")
+    self.assertEqual(resource_dump, "[Repository['HDP-2.0._'],\n Repository['HDP-2.0._'],\n Package['hbase'],\n Package['yet-another-package']]")
 
   @patch("__builtin__.open")
   def test_structured_out(self, open_mock):
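
A note on the TestScript.py hunks above: the expected resource dumps drop a
duplicated set of Repository resources (the code change that removes the
duplication is not shown in this diff). The test idiom itself is unchanged;
condensed from the test above, with fixture names as used there:

    import pprint
    from resource_management import *

    # With test_mode=True, resources are presumably recorded on
    # env.resource_list rather than executed, which is what the
    # assertion inspects.
    with Environment(".", test_mode=True) as env:
      Script.config = no_packages_config  # fixture dict from the test module
      script.install_packages(env)
    resource_dump = pprint.pformat(env.resource_list)
    assert resource_dump == "[Repository['HDP-2.0._']]"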

http://git-wip-us.apache.org/repos/asf/ambari/blob/e66d84f8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
index 2d2c8f9..0dad995 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
@@ -19,11 +19,9 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
-from utils import hdfs_directory
-import urlparse
 
 
-def namenode(action=None, format=True):
+def namenode(action=None, do_format=True):
   import params
   #we need this directory to be present before any action(HA manual steps for
   #additional namenode)
@@ -31,7 +29,7 @@ def namenode(action=None, format=True):
     create_name_dirs(params.dfs_name_dir)
 
   if action == "start":
-    if format:
+    if do_format:
       format_namenode()
       pass
 
@@ -48,11 +46,22 @@ def namenode(action=None, format=True):
       create_log_dir=True,
       principal=params.dfs_namenode_kerberos_principal
     )
+    if params.dfs_ha_enabled:
+      dfs_check_nn_status_cmd = format("su - {hdfs_user} -c 'hdfs haadmin -getServiceState {namenode_id} | grep active > /dev/null'")
+    else:
+      dfs_check_nn_status_cmd = None
 
-    # TODO: extract creating of dirs to different services
-    create_app_directories()
-    create_user_directories()
+    namenode_safe_mode_off = format("su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")
 
+    if params.security_enabled:
+      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user}"),
+              user = params.hdfs_user)
+    Execute(namenode_safe_mode_off,
+            tries=40,
+            try_sleep=10,
+            only_if=dfs_check_nn_status_cmd #skip when HA not active
+    )
+    create_hdfs_directories(dfs_check_nn_status_cmd)
   if action == "stop":
     service(
       action="stop", name="namenode", user=params.hdfs_user,
@@ -63,7 +72,6 @@ def namenode(action=None, format=True):
   if action == "decommission":
     decommission()
 
-
 def create_name_dirs(directories):
   import params
 
@@ -76,100 +84,22 @@ def create_name_dirs(directories):
   )
 
 
-def create_app_directories():
+def create_hdfs_directories(check):
   import params
 
-  hdfs_directory(name="/tmp",
-                 owner=params.hdfs_user,
-                 mode="777"
+  params.HdfsDirectory("/tmp",
+                       action="create_delayed",
+                       owner=params.hdfs_user,
+                       mode=0777
   )
-  #mapred directories
-  if params.has_histroryserver:
-    hdfs_directory(name="/mapred",
-                   owner=params.mapred_user
-    )
-    hdfs_directory(name="/mapred/system",
-                   owner=params.hdfs_user
-    )
-    #hbase directories
-  if len(params.hbase_master_hosts) != 0:
-    hdfs_directory(name=params.hbase_hdfs_root_dir,
-                   owner=params.hbase_user
-    )
-    hdfs_directory(name=params.hbase_staging_dir,
-                   owner=params.hbase_user,
-                   mode="711"
-    )
-    #hive directories
-  if len(params.hive_server_host) != 0:
-    hdfs_directory(name=params.hive_apps_whs_dir,
-                   owner=params.hive_user,
-                   mode="777"
-    )
-  if len(params.hcat_server_hosts) != 0:
-    hdfs_directory(name=params.webhcat_apps_dir,
-                   owner=params.webhcat_user,
-                   mode="755"
-    )
-  if len(params.hs_host) != 0:
-    if params.yarn_log_aggregation_enabled:
-      hdfs_directory(name=params.yarn_nm_app_log_dir,
-                     owner=params.yarn_user,
-                     group=params.user_group,
-                     mode="777",
-                     recursive_chmod=True
-      )
-    hdfs_directory(name=params.mapreduce_jobhistory_intermediate_done_dir,
-                   owner=params.mapred_user,
-                   group=params.user_group,
-                   mode="777"
-    )
-
-    hdfs_directory(name=params.mapreduce_jobhistory_done_dir,
-                   owner=params.mapred_user,
-                   group=params.user_group,
-                   mode="1777"
-    )
-
-  if params.has_falcon_host:
-    if params.falcon_store_uri[0:4] == "hdfs":
-      hdfs_directory(name=params.store_uri,
-                     owner=params.falcon_user,
-                     mode="755"
-      )
-
-
-def create_user_directories():
-  import params
-
-  hdfs_directory(name=params.smoke_hdfs_user_dir,
-                 owner=params.smoke_user,
-                 mode=params.smoke_hdfs_user_mode
+  params.HdfsDirectory(params.smoke_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.smoke_user,
+                       mode=params.smoke_hdfs_user_mode
+  )
+  params.HdfsDirectory(None, action="create",
+                       only_if=check #skip creation when HA not active
   )
-
-  if params.has_hive_server_host:
-    hdfs_directory(name=params.hive_hdfs_user_dir,
-                   owner=params.hive_user,
-                   mode=params.hive_hdfs_user_mode
-    )
-
-  if params.has_hcat_server_host:
-    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-      hdfs_directory(name=params.hcat_hdfs_user_dir,
-                     owner=params.hcat_user,
-                     mode=params.hcat_hdfs_user_mode
-      )
-    hdfs_directory(name=params.webhcat_hdfs_user_dir,
-                   owner=params.webhcat_user,
-                   mode=params.webhcat_hdfs_user_mode
-    )
-
-  if params.has_oozie_server:
-    hdfs_directory(name=params.oozie_hdfs_user_dir,
-                   owner=params.oozie_user,
-                   mode=params.oozie_hdfs_user_mode
-    )
-
 
 def format_namenode(force=None):
   import params
@@ -208,10 +138,7 @@ def decommission():
        group=user_group
   )
 
-  if params.update_exclude_file_only == False:
-    ExecuteHadoop('dfsadmin -refreshNodes',
-                  user=hdfs_user,
-                  conf_dir=conf_dir,
-                  kinit_override=True)
-    pass
-  pass
+  ExecuteHadoop('dfsadmin -refreshNodes',
+                user=hdfs_user,
+                conf_dir=conf_dir,
+                kinit_override=True)
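
Also worth noting in the start path above: the Execute of
namenode_safe_mode_off retries the safemode check up to 40 times with a
10-second sleep between attempts, and only_if skips the wait (and the
directory creation) on a standby NameNode when HA is enabled. The same
retry/guard idiom sketched in isolation; the command strings follow the
diff, and the kinit step for secure clusters is omitted:

    from resource_management import *
    import params  # assumed to provide hdfs_user, namenode_id, dfs_ha_enabled

    safe_mode_off = format(
        "su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' "
        "| grep 'Safe mode is OFF'")

    # With HA, gate on this NameNode being the active one; without HA,
    # only_if is None and the wait always runs.
    check_active = format(
        "su - {hdfs_user} -c 'hdfs haadmin -getServiceState {namenode_id} "
        "| grep active > /dev/null'") if params.dfs_ha_enabled else None

    Execute(safe_mode_off,
            tries=40,       # retry up to 40 times
            try_sleep=10,   # sleeping 10s between attempts
            only_if=check_active)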

http://git-wip-us.apache.org/repos/asf/ambari/blob/e66d84f8/ambari-server/src/test/python/stacks/2.1.1/STORM/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1.1/STORM/test_service_check.py b/ambari-server/src/test/python/stacks/2.1.1/STORM/test_service_check.py
index 94d4744..fdc8305 100644
--- a/ambari-server/src/test/python/stacks/2.1.1/STORM/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.1.1/STORM/test_service_check.py
@@ -32,7 +32,6 @@ class TestStormServiceCheck(RMFTestCase):
                        command="service_check",
                        config_file="default.json"
     )
-    self.printResources()
 
     self.assertResourceCalled('File', '/tmp/wordCount.jar',
       content = StaticFile('wordCount.jar'),