Posted to commits@ambari.apache.org by ao...@apache.org on 2014/02/11 17:17:44 UTC

[2/2] git commit: AMBARI-4583. Extract common hdfs directories creation code per services (Eugene Chekanskiy via aonishuk)

AMBARI-4583. Extract common hdfs directories creation code per services
(Eugene Chekanskiy via aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/fa47490b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/fa47490b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/fa47490b

Branch: refs/heads/trunk
Commit: fa47490b58668ab7e33eba82174e011f16cb6f1b
Parents: 81f5dd2
Author: Andrew Onischuk <ao...@hortonworks.com>
Authored: Tue Feb 11 08:16:38 2014 -0800
Committer: Andrew Onischuk <ao...@hortonworks.com>
Committed: Tue Feb 11 08:16:38 2014 -0800

----------------------------------------------------------------------
 .../libraries/providers/__init__.py             |   3 +-
 .../libraries/providers/hdfs_directory.py       | 109 ++++++++++
 .../libraries/resources/__init__.py             |   3 +-
 .../libraries/resources/hdfs_directory.py       |  44 ++++
 .../services/HBASE/package/scripts/hbase.py     |  23 ++-
 .../HBASE/package/scripts/hbase_client.py       |   2 +-
 .../HBASE/package/scripts/hbase_master.py       |   3 +-
 .../HBASE/package/scripts/hbase_regionserver.py |   3 +-
 .../services/HBASE/package/scripts/params.py    |  21 ++
 .../HDFS/package/scripts/hdfs_namenode.py       | 109 ++--------
 .../services/HDFS/package/scripts/params.py     |  55 ++---
 .../1.3.2/services/HIVE/package/scripts/hive.py |  13 ++
 .../HIVE/package/scripts/hive_server.py         |   2 +-
 .../services/HIVE/package/scripts/params.py     |  22 ++
 .../MAPREDUCE/package/scripts/historyserver.py  |   5 +-
 .../MAPREDUCE/package/scripts/jobtracker.py     |   5 +-
 .../MAPREDUCE/package/scripts/mapreduce.py      |  32 ++-
 .../MAPREDUCE/package/scripts/params.py         |  23 ++-
 .../services/OOZIE/package/scripts/oozie.py     |   6 +
 .../OOZIE/package/scripts/oozie_server.py       |   2 +-
 .../services/OOZIE/package/scripts/params.py    |  21 ++
 .../services/WEBHCAT/package/scripts/params.py  |  25 +++
 .../services/WEBHCAT/package/scripts/webhcat.py |  17 ++
 .../services/HBASE/package/scripts/hbase.py     |  21 +-
 .../HBASE/package/scripts/hbase_client.py       |   2 +-
 .../HBASE/package/scripts/hbase_master.py       |   3 +-
 .../HBASE/package/scripts/hbase_regionserver.py |   3 +-
 .../services/HBASE/package/scripts/params.py    |  21 ++
 .../services/HDFS/package/scripts/params.py     |  52 ++---
 .../2.0.6/services/HIVE/package/scripts/hive.py |  13 ++
 .../HIVE/package/scripts/hive_server.py         |   2 +-
 .../services/HIVE/package/scripts/params.py     |  25 ++-
 .../services/OOZIE/package/scripts/oozie.py     |   7 +
 .../services/OOZIE/package/scripts/params.py    |  20 ++
 .../services/WEBHCAT/package/scripts/params.py  |  24 +++
 .../services/WEBHCAT/package/scripts/webhcat.py |  18 ++
 .../YARN/package/scripts/historyserver.py       |   5 +-
 .../YARN/package/scripts/nodemanager.py         |   5 +-
 .../services/YARN/package/scripts/params.py     |  26 ++-
 .../2.0.6/services/YARN/package/scripts/yarn.py |  35 +++-
 .../services/FALCON/package/scripts/falcon.py   |   6 +
 .../services/FALCON/package/scripts/params.py   |  19 ++
 .../stacks/1.3.2/HBASE/test_hbase_master.py     |  74 ++++++-
 .../1.3.2/HBASE/test_hbase_regionserver.py      |  74 ++++++-
 .../stacks/1.3.2/HIVE/test_hive_server.py       |  56 ++++++
 .../MAPREDUCE/test_mapreduce_historyserver.py   | 118 ++++++++++-
 .../MAPREDUCE/test_mapreduce_jobtracker.py      | 118 ++++++++++-
 .../stacks/1.3.2/OOZIE/test_oozie_server.py     | 200 +++++++++++++++++++
 .../stacks/1.3.2/WEBHCAT/test_webhcat_server.py |  56 ++++++
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |  74 ++++++-
 .../2.0.6/HBASE/test_hbase_regionserver.py      |  74 ++++++-
 .../stacks/2.0.6/HIVE/test_hive_server.py       |  56 ++++++
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  20 ++
 .../stacks/2.0.6/WEBHCAT/test_webhcat_server.py |  56 ++++++
 .../stacks/2.0.6/YARN/test_historyserver.py     | 124 +++++++++++-
 .../stacks/2.0.6/YARN/test_nodemanager.py       | 124 +++++++++++-
 .../src/test/python/stacks/utils/RMFTestCase.py |   3 +
 57 files changed, 1833 insertions(+), 249 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-agent/src/main/python/resource_management/libraries/providers/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/providers/__init__.py b/ambari-agent/src/main/python/resource_management/libraries/providers/__init__.py
index 59b6bb5..8d557f2 100644
--- a/ambari-agent/src/main/python/resource_management/libraries/providers/__init__.py
+++ b/ambari-agent/src/main/python/resource_management/libraries/providers/__init__.py
@@ -31,6 +31,7 @@ PROVIDERS = dict(
     XmlConfig="resource_management.libraries.providers.xml_config.XmlConfigProvider",
     PropertiesFile="resource_management.libraries.providers.properties_file.PropertiesFileProvider",
     MonitorWebserver="resource_management.libraries.providers.monitor_webserver.MonitorWebserverProvider",
-    Repository="resource_management.libraries.providers.repository.RepositoryProvider"
+    Repository="resource_management.libraries.providers.repository.RepositoryProvider",
+    HdfsDirectory="resource_management.libraries.providers.hdfs_directory.HdfsDirectoryProvider"
   ),
 )
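
For context, PROVIDERS is a plain name-to-class registry: each resource name maps to the dotted path of the provider class implementing its actions. A minimal, self-contained sketch of that lookup pattern follows (the registry entry is copied from the hunk above, but load_provider is an invented illustration, not the library's real resolution code):

    import importlib

    # Illustrative registry, same shape as PROVIDERS: resource name -> dotted class path.
    SKETCH_PROVIDERS = {
        "HdfsDirectory":
            "resource_management.libraries.providers.hdfs_directory.HdfsDirectoryProvider",
    }

    def load_provider(resource_name):
        # Split 'package.module.ClassName' and import the class lazily.
        module_name, class_name = SKETCH_PROVIDERS[resource_name].rsplit(".", 1)
        return getattr(importlib.import_module(module_name), class_name)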

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-agent/src/main/python/resource_management/libraries/providers/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/providers/hdfs_directory.py b/ambari-agent/src/main/python/resource_management/libraries/providers/hdfs_directory.py
new file mode 100644
index 0000000..08ac9cd
--- /dev/null
+++ b/ambari-agent/src/main/python/resource_management/libraries/providers/hdfs_directory.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+from resource_management import *
+directories_list = [] # directories list for mkdir
+chmod_map = {} # (mode, recursive) : dir_list map
+chown_map = {} # (owner, group, recursive) : dir_list map
+class HdfsDirectoryProvider(Provider):
+  def action_create_delayed(self):
+    global directories_list
+    global chmod_map
+    global chown_map
+
+    if not self.resource.dir_name:
+      return
+
+    dir_name = self.resource.dir_name
+    dir_owner = self.resource.owner
+    dir_group = self.resource.group
+    dir_mode = oct(self.resource.mode)[1:] if self.resource.mode else None
+    directories_list.append(self.resource.dir_name)
+
+    recursive_chown_str = "-R" if self.resource.recursive_chown else ""
+    recursive_chmod_str = "-R" if self.resource.recursive_chmod else ""
+    # grouping directories by mode/owner/group to modify them in one 'chXXX' call
+    if dir_mode:
+      chmod_key = (dir_mode,recursive_chmod_str)
+      if chmod_key in chmod_map:
+        chmod_map[chmod_key].append(dir_name)
+      else:
+        chmod_map[chmod_key] = [dir_name]
+
+    if dir_owner:
+      owner_key = (dir_owner,dir_group,recursive_chown_str)
+      if owner_key in chown_map:
+        chown_map[owner_key].append(dir_name)
+      else:
+        chown_map[owner_key] = [dir_name]
+
+  def action_create(self):
+    global directories_list
+    global chmod_map
+    global chown_map
+
+    self.action_create_delayed()
+
+    hdp_conf_dir = self.resource.conf_dir
+    hdp_hdfs_user = self.resource.hdfs_user
+    secured = self.resource.security_enabled
+    keytab_file = self.resource.keytab
+    kinit_path = self.resource.kinit_path_local
+
+    chmod_commands = []
+    chown_commands = []
+
+    for chmod_key, chmod_dirs in chmod_map.items():
+      mode = chmod_key[0]
+      recursive = chmod_key[1]
+      chmod_dirs_str = ' '.join(chmod_dirs)
+      chmod_commands.append(format("hadoop fs -chmod {recursive} {mode} {chmod_dirs_str}"))
+
+    for chown_key, chown_dirs in chown_map.items():
+      owner = chown_key[0]
+      group = chown_key[1]
+      recursive = chown_key[2]
+      chown_dirs_str = ' '.join(chown_dirs)
+      if owner:
+        chown = owner
+        if group:
+          chown = format("{owner}:{group}")
+        chown_commands.append(format("hadoop fs -chown {recursive} {chown} {chown_dirs_str}"))
+
+    if secured:
+        Execute(format("{kinit_path} -kt {keytab_file} {hdp_hdfs_user}"),
+                user=hdp_hdfs_user)
+    #create all directories in one 'mkdir' call
+    dir_list_str = ' '.join(directories_list)
+    #for hadoop 2 we need to specify -p so mkdir creates missing parent directories
+    parent_flag = '`rpm -q hadoop | grep -q "hadoop-1" || echo "-p"`'
+
+    Execute(format('hadoop fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
+                   chmod_cmd=' && '.join(chmod_commands),
+                   chown_cmd=' && '.join(chown_commands)),
+            user=hdp_hdfs_user,
+            not_if=format("su - {hdp_hdfs_user} -c 'hadoop fs -ls {dir_list_str}'")
+    )
+
+    directories_list[:] = []
+    chmod_map.clear()
+    chown_map.clear()
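
The heart of the provider is batching: create_delayed only accumulates directories, grouping them by identical (mode, recursive) and (owner, group, recursive) keys, and action_create then issues one mkdir for all of them plus one chmod/chown per group, guarded by a not_if 'hadoop fs -ls' check so the whole batch is skipped when the directories already exist. A standalone sketch of the grouping step (the directory names and modes are made up; setdefault replaces the has_key branching):

    # Group directories so each distinct (mode, recursive) pair costs one chmod call.
    pending = [("/tmp", "777", "-R"), ("/user/ambari-qa", "770", ""), ("/app-logs", "777", "-R")]

    chmod_map = {}
    for dir_name, mode, recursive in pending:
        chmod_map.setdefault((mode, recursive), []).append(dir_name)

    commands = ["hadoop fs -chmod %s %s %s" % (recursive, mode, " ".join(dirs))
                for (mode, recursive), dirs in chmod_map.items()]
    print(" && ".join(commands))
    # e.g. hadoop fs -chmod -R 777 /tmp /app-logs && hadoop fs -chmod  770 /user/ambari-qa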

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py b/ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py
index 7c86d18..a072455 100644
--- a/ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py
+++ b/ambari-agent/src/main/python/resource_management/libraries/resources/__init__.py
@@ -25,4 +25,5 @@ from resource_management.libraries.resources.template_config import *
 from resource_management.libraries.resources.xml_config import *
 from resource_management.libraries.resources.properties_file import *
 from resource_management.libraries.resources.repository import *
-from resource_management.libraries.resources.monitor_webserver import *
\ No newline at end of file
+from resource_management.libraries.resources.monitor_webserver import *
+from resource_management.libraries.resources.hdfs_directory import *
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-agent/src/main/python/resource_management/libraries/resources/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/resource_management/libraries/resources/hdfs_directory.py b/ambari-agent/src/main/python/resource_management/libraries/resources/hdfs_directory.py
new file mode 100644
index 0000000..63d9cc2
--- /dev/null
+++ b/ambari-agent/src/main/python/resource_management/libraries/resources/hdfs_directory.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+__all__ = ["HdfsDirectory"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
+
+class HdfsDirectory(Resource):
+  action = ForcedListArgument()
+
+  dir_name = ResourceArgument(default=lambda obj: obj.name)
+  owner = ResourceArgument()
+  group = ResourceArgument()
+  mode = ResourceArgument()
+  recursive_chown = BooleanArgument(default=False)
+  recursive_chmod = BooleanArgument(default=False)
+
+  conf_dir = ResourceArgument()
+  security_enabled = BooleanArgument(default=False)
+  keytab = ResourceArgument()
+  kinit_path_local = ResourceArgument()
+  hdfs_user = ResourceArgument()
+
+  # action 'create' immediately creates all pending directories in an efficient manner
+  # action 'create_delayed' adds the directory to the list of pending directories
+  actions = Resource.actions + ["create","create_delayed"]
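
The create_delayed/create split is an accumulate-then-flush pattern: every create_delayed call merely records a request, and the final create flushes them all in one batch. A toy illustration of the pattern outside the resource framework (class and method names here are invented for the sketch):

    class DelayedDirectoryBatch(object):
        """Toy accumulate-then-flush batch mirroring create_delayed / create."""
        def __init__(self):
            self.pending = []

        def create_delayed(self, dir_name, owner=None, mode=None):
            # Record the request only; nothing touches HDFS yet.
            self.pending.append((dir_name, owner, mode))

        def create(self):
            # Flush every pending directory with a single mkdir, then reset.
            if self.pending:
                print("hadoop fs -mkdir -p " + " ".join(d for d, _, _ in self.pending))
            self.pending = []

    batch = DelayedDirectoryBatch()
    batch.create_delayed("/apps/hbase/data", owner="hbase")
    batch.create_delayed("/apps/hbase/staging", owner="hbase", mode=0o711)
    batch.create()  # one mkdir covers both directories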

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase.py
index e77a233..c140a6d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase.py
@@ -22,10 +22,23 @@ import os
 from resource_management import *
 import sys
 
-def hbase(type=None # 'master' or 'regionserver' or 'client'
+def hbase(name=None # 'master' or 'regionserver' or 'client'
               ):
   import params
-  
+
+
+  if name in ["regionserver","master"]:
+    params.HdfsDirectory(params.hbase_hdfs_root_dir,
+                         action="create_delayed",
+                         owner=params.hbase_user
+    )
+    params.HdfsDirectory(params.hbase_staging_dir,
+                         action="create_delayed",
+                         owner=params.hbase_user,
+                         mode=0711
+    )
+    params.HdfsDirectory(None, action="create")
+
   Directory( params.conf_dir,
       owner = params.hbase_user,
       group = params.user_group,
@@ -67,15 +80,15 @@ def hbase(type=None # 'master' or 'regionserver' or 'client'
   hbase_TemplateConfig( 'hbase-env.sh')     
        
   hbase_TemplateConfig( params.metric_prop_file_name,
-    tag = 'GANGLIA-MASTER' if type == 'master' else 'GANGLIA-RS'
+    tag = 'GANGLIA-MASTER' if name == 'master' else 'GANGLIA-RS'
   )
 
   hbase_TemplateConfig( 'regionservers')
 
   if params.security_enabled:
-    hbase_TemplateConfig( format("hbase_{type}_jaas.conf"))
+    hbase_TemplateConfig( format("hbase_{name}_jaas.conf"))
   
-  if type != "client":
+  if name != "client":
     Directory( params.pid_dir,
       owner = params.hbase_user,
       recursive = True

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_client.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_client.py
index 0f2a1bc..01ca548 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_client.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_client.py
@@ -33,7 +33,7 @@ class HbaseClient(Script):
     import params
     env.set_params(params)
     
-    hbase(type='client')
+    hbase(name='client')
 
   def status(self, env):
     raise ClientComponentHasNoStatus()

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_master.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_master.py
index 9c78e5c..4b36eb2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_master.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_master.py
@@ -29,13 +29,12 @@ from hbase_decommission import hbase_decommission
 class HbaseMaster(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
     
   def configure(self, env):
     import params
     env.set_params(params)
 
-    hbase(type='master')
+    hbase(name='master')
     
   def start(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_regionserver.py
index 2d91e75..49528d3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_regionserver.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/hbase_regionserver.py
@@ -28,13 +28,12 @@ from hbase_service import hbase_service
 class HbaseRegionServer(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
     
   def configure(self, env):
     import params
     env.set_params(params)
 
-    hbase(type='regionserver')
+    hbase(name='regionserver')
       
   def start(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
index 8e85637..923b6ce 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
@@ -98,3 +98,24 @@ if ('hbase-log4j' in config['configurations']):
   log4j_props = config['configurations']['hbase-log4j']
 else:
   log4j_props = None
+
+#hdfs directories
+hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']
+hbase_staging_dir = "/apps/hbase/staging"
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create an HDFS directory, call params.HdfsDirectory in the service code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
\ No newline at end of file
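
Each service's params.py repeats the same trick: functools.partial pre-binds the cluster-wide keyword arguments (conf dir, HDFS user, kerberos settings), so call sites pass only the per-directory ones. A self-contained sketch of that binding (hdfs_directory below is a stand-in, not the real HdfsDirectory resource):

    import functools

    def hdfs_directory(dir_name, action=None, owner=None, mode=None,
                       conf_dir=None, hdfs_user=None, security_enabled=False):
        # Stand-in resource: just shows which arguments each call ends up with.
        print("%s action=%s owner=%s mode=%s conf_dir=%s hdfs_user=%s secured=%s"
              % (dir_name, action, owner, mode, conf_dir, hdfs_user, security_enabled))

    # Bind the cluster-wide arguments once, as the params.py modules do...
    HdfsDirectory = functools.partial(hdfs_directory,
                                      conf_dir="/etc/hadoop/conf",
                                      hdfs_user="hdfs",
                                      security_enabled=False)

    # ...so service code supplies only the per-directory arguments.
    HdfsDirectory("/apps/hbase/staging", action="create_delayed", owner="hbase", mode=0o711)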

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
index bea67ab..9d193cf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
@@ -19,18 +19,16 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
-from utils import hdfs_directory
-import urlparse
 
 
-def namenode(action=None, format=True):
+def namenode(action=None, do_format=True):
   import params
 
   if action == "configure":
     create_name_dirs(params.dfs_name_dir)
 
   if action == "start":
-    if format:
+    if do_format:
       format_namenode()
       pass
     service(
@@ -41,9 +39,15 @@ def namenode(action=None, format=True):
       principal=params.dfs_namenode_kerberos_principal
     )
 
-    # TODO: extract creating of dirs to different services
-    create_app_directories()
-    create_user_directories()
+    namenode_safe_mode_off = format("su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")
+    if params.security_enabled:
+      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user}"),
+              user = params.hdfs_user)
+    Execute(namenode_safe_mode_off,
+            tries=40,
+            try_sleep=10
+    )
+    create_hdfs_directories()
 
   if action == "stop":
     service(
@@ -66,89 +70,20 @@ def create_name_dirs(directories):
             recursive=True
   )
 
-
-def create_app_directories():
+def create_hdfs_directories():
   import params
 
-  hdfs_directory(name="/tmp",
-                 owner=params.hdfs_user,
-                 mode="777"
+  params.HdfsDirectory("/tmp",
+                       action="create_delayed",
+                       owner=params.hdfs_user,
+                       mode=0777
   )
-  #mapred directories
-  if params.has_jobtracker:
-    hdfs_directory(name="/mapred",
-                   owner=params.mapred_user
-    )
-    hdfs_directory(name="/mapred/system",
-                   owner=params.mapred_user
-    )
-    #hbase directories
-  if len(params.hbase_master_hosts) != 0:
-    hdfs_directory(name=params.hbase_hdfs_root_dir,
-                   owner=params.hbase_user
-    )
-    hdfs_directory(name=params.hbase_staging_dir,
-                   owner=params.hbase_user,
-                   mode="711"
-    )
-    #hive directories
-  if len(params.hive_server_host) != 0:
-    hdfs_directory(name=params.hive_apps_whs_dir,
-                   owner=params.hive_user,
-                   mode="777"
-    )
-  if len(params.hcat_server_hosts) != 0:
-    hdfs_directory(name=params.webhcat_apps_dir,
-                   owner=params.webhcat_user,
-                   mode="755"
-    )
-  if len(params.hs_host) != 0:
-    hdfs_directory(name=params.mapreduce_jobhistory_intermediate_done_dir,
-                   owner=params.mapred_user,
-                   group=params.user_group,
-                   mode="777"
-    )
-
-    hdfs_directory(name=params.mapreduce_jobhistory_done_dir,
-                   owner=params.mapred_user,
-                   group=params.user_group,
-                   mode="777"
-    )
-
-  pass
-
-
-def create_user_directories():
-  import params
-
-  hdfs_directory(name=params.smoke_hdfs_user_dir,
-                 owner=params.smoke_user,
-                 mode=params.smoke_hdfs_user_mode
+  params.HdfsDirectory(params.smoke_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.smoke_user,
+                       mode=params.smoke_hdfs_user_mode
   )
-
-  if params.has_hive_server_host:
-    hdfs_directory(name=params.hive_hdfs_user_dir,
-                   owner=params.hive_user,
-                   mode=params.hive_hdfs_user_mode
-    )
-
-  if params.has_hcat_server_host:
-    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-      hdfs_directory(name=params.hcat_hdfs_user_dir,
-                     owner=params.hcat_user,
-                     mode=params.hcat_hdfs_user_mode
-      )
-    hdfs_directory(name=params.webhcat_hdfs_user_dir,
-                   owner=params.webhcat_user,
-                   mode=params.webhcat_hdfs_user_mode
-    )
-
-  if params.has_oozie_server:
-    hdfs_directory(name=params.oozie_hdfs_user_dir,
-                   owner=params.oozie_user,
-                   mode=params.oozie_hdfs_user_mode
-    )
-
+  params.HdfsDirectory(None, action="create")
 
 def format_namenode(force=None):
   import params
@@ -191,4 +126,4 @@ def decommission():
                   user=hdfs_user,
                   conf_dir=conf_dir,
                   kinit_override=True)
-    pass
\ No newline at end of file
+    pass
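
The new start path above waits for the NameNode to leave safe mode by retrying the dfsadmin check up to 40 times with a 10-second pause (Execute's tries/try_sleep). Roughly the same retry semantics in plain Python (the command string is taken from the hunk; wait_for is an invented helper, not Execute itself):

    import subprocess
    import time

    def wait_for(cmd, tries=40, try_sleep=10):
        """Retry a shell command until it exits 0, like Execute(tries=..., try_sleep=...)."""
        for _ in range(tries):
            if subprocess.call(cmd, shell=True) == 0:
                return
            time.sleep(try_sleep)
        raise RuntimeError("command failed after %d tries: %s" % (tries, cmd))

    # Hypothetical usage mirroring namenode_safe_mode_off:
    # wait_for("su - hdfs -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")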

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
index 324e307..0e26060 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
@@ -123,49 +123,38 @@ dfs_domain_socket_dir = os.path.dirname(dfs_domain_socket_path)
 
 hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 
-jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']#"/grid/0/hdfs/journal"
+jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']
 
-# if stack_version[0] == "2":
-#dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
-# else:
-dfs_name_dir = config['configurations']['hdfs-site']['dfs.name.dir']#","/tmp/hadoop-hdfs/dfs/name")
+dfs_name_dir = config['configurations']['hdfs-site']['dfs.name.dir']
 
 namenode_dirs_created_stub_dir = format("{hdfs_log_dir_prefix}/{hdfs_user}")
 namenode_dirs_stub_filename = "namenode_dirs_created"
 
-hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']#","/apps/hbase/data")
-hbase_staging_dir = "/apps/hbase/staging"
-hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"] #, "/apps/hive/warehouse")
-webhcat_apps_dir = "/apps/webhcat"
-mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']#","/app-logs")
-mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']#","/mr-history/done")
-
-if has_oozie_server:
-  oozie_hdfs_user_dir = format("/user/{oozie_user}")
-  oozie_hdfs_user_mode = 775
-if has_hcat_server_host:
-  hcat_hdfs_user_dir = format("/user/{hcat_user}")
-  hcat_hdfs_user_mode = 755
-  webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-  webhcat_hdfs_user_mode = 755
-if has_hive_server_host:
-  hive_hdfs_user_dir = format("/user/{hive_user}")
-  hive_hdfs_user_mode = 700
 smoke_hdfs_user_dir = format("/user/{smoke_user}")
-smoke_hdfs_user_mode = 770
+smoke_hdfs_user_mode = 0770
 
 namenode_formatted_mark_dir = format("{hadoop_pid_dir_prefix}/hdfs/namenode/formatted/")
 
-# if stack_version[0] == "2":
-#fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir'] #","/tmp/hadoop-hdfs/dfs/namesecondary")
-# else:
-fs_checkpoint_dir = config['configurations']['core-site']['fs.checkpoint.dir']#","/tmp/hadoop-hdfs/dfs/namesecondary")
-
-# if stack_version[0] == "2":
-#dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']#,"/tmp/hadoop-hdfs/dfs/data")
-# else:
-dfs_data_dir = config['configurations']['hdfs-site']['dfs.data.dir']#,"/tmp/hadoop-hdfs/dfs/data")
+fs_checkpoint_dir = config['configurations']['core-site']['fs.checkpoint.dir']
 
+dfs_data_dir = config['configurations']['hdfs-site']['dfs.data.dir']
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create an HDFS directory, call params.HdfsDirectory in the service code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
 
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
index 5f03871..940f8c3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive.py
@@ -26,6 +26,19 @@ import sys
 def hive(name=None):
   import params
 
+
+  if name == "hiveserver2":
+    params.HdfsDirectory(params.hive_apps_whs_dir,
+                   action="create_delayed",
+                   owner=params.hive_user,
+                   mode=0777
+    )
+    params.HdfsDirectory(params.hive_hdfs_user_dir,
+                   action="create_delayed",
+                   owner=params.hive_user,
+                   mode=params.hive_hdfs_user_mode
+    )
+    params.HdfsDirectory(None, action="create")
   if name == 'metastore' or name == 'hiveserver2':
     hive_config_dir = params.hive_server_conf_dir
     config_file_mode = 0600

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py
index 3ad81a1..b05649c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hive_server.py
@@ -28,7 +28,7 @@ class HiveServer(Script):
 
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
+
   def configure(self, env):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
index ca8393c..a9d3e9c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
@@ -135,3 +135,25 @@ else:
   log4j_exec_props = None
   
 daemon_name = status_params.daemon_name
+
+#hdfs directories
+hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
+hive_hdfs_user_dir = format("/user/{hive_user}")
+hive_hdfs_user_mode = 0700
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create an HDFS directory, call params.HdfsDirectory in the service code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/historyserver.py
index 972a767..25bc59a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/historyserver.py
@@ -28,12 +28,11 @@ from service import service
 class Historyserver(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
   
   def configure(self, env):
     import params
     env.set_params(params)
-    mapreduce()
+    mapreduce(name="historyserver")
 
   def start(self, env):
     import params
@@ -57,4 +56,4 @@ class Historyserver(Script):
      check_process_status(status_params.historyserver_pid_file)
 
 if __name__ == "__main__":
-  Historyserver().execute()
\ No newline at end of file
+  Historyserver().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py
index d9a4709..e87e469 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/jobtracker.py
@@ -29,12 +29,11 @@ from service import service
 class Jobtracker(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
-    mapreduce()
+    mapreduce(name="jobtracker")
 
   def start(self, env):
     import params
@@ -82,4 +81,4 @@ class Jobtracker(Script):
     pass
 
 if __name__ == "__main__":
-  Jobtracker().execute()
\ No newline at end of file
+  Jobtracker().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py
index c5fd002..9e9233a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/mapreduce.py
@@ -24,9 +24,37 @@ from resource_management import *
 import sys
 
 
-def mapreduce():
+def mapreduce(name=None):
   import params
 
+
+  if name in ["jobtracker","historyserver"]:
+    params.HdfsDirectory("/mapred",
+                         action="create_delayed",
+                         owner=params.mapred_user
+    )
+    params.HdfsDirectory("/mapred/system",
+                         action="create_delayed",
+                         owner=params.mapred_user
+    )
+    params.HdfsDirectory("/mapred/history",
+                         action="create_delayed",
+                         owner=params.mapred_user
+    )
+    params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
+                         action="create_delayed",
+                         owner=params.mapred_user,
+                         group=params.user_group,
+                         mode=0777
+    )
+    params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
+                         action="create_delayed",
+                         owner=params.mapred_user,
+                         group=params.user_group,
+                         mode=0777
+    )
+    params.HdfsDirectory(None, action="create")
+
   Directory([params.mapred_pid_dir,params.mapred_log_dir],
             owner=params.mapred_user,
             group=params.user_group,
@@ -47,4 +75,4 @@ def mapreduce():
   File(params.mapred_hosts_file_path,
             owner=params.mapred_user,
             group=params.user_group,
-  )
\ No newline at end of file
+  )
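
Note that the patch switches mode arguments from strings like "777" to Python octal integer literals (0777, 0711); the provider turns them back into the text chmod expects via oct(self.resource.mode)[1:]. A quick illustration of that round trip, valid for the Python 2 the agent targets (Python 3 spells the literal 0o777 and oct() returns '0o777', so the slice would differ there):

    # Python 2: oct(0777) == '0777', so [1:] strips the leading zero.
    print(oct(0777)[1:])  # -> '777'
    print(oct(0711)[1:])  # -> '711'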

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
index d68d212..47b2db0 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
@@ -52,4 +52,25 @@ kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "
 #exclude file
 mr_exclude_hosts = default("/clusterHostInfo/decom_tt_hosts", [])
 exclude_file_path = config['configurations']['mapred-site']['mapred.hosts.exclude']
-mapred_hosts_file_path = config['configurations']['mapred-site']['mapred.hosts']
\ No newline at end of file
+mapred_hosts_file_path = config['configurations']['mapred-site']['mapred.hosts']
+
+#hdfs directories
+mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']
+mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapred.job.tracker.history.completed.location']
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create an HDFS directory, call params.HdfsDirectory in the service code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
index e1a7869..151a1f8 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie.py
@@ -5,6 +5,12 @@ def oozie(is_server=False
               ):
   import params
 
+  if is_server:
+    params.HdfsDirectory(params.oozie_hdfs_user_dir,
+                         action="create",
+                         owner=params.oozie_user,
+                         mode=params.oozie_hdfs_user_mode
+    )
   XmlConfig( "oozie-site.xml",
     conf_dir = params.conf_dir, 
     configurations = params.config['configurations']['oozie-site'],

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py
index eca2a56..513e9e6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/oozie_server.py
@@ -28,7 +28,6 @@ from oozie_service import oozie_service
 class OozieServer(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
     
   def configure(self, env):
     import params
@@ -39,6 +38,7 @@ class OozieServer(Script):
   def start(self, env):
     import params
     env.set_params(params)
+    self.configure(env)
     oozie_service(action='start')
     
   def stop(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
index cd3e7bb..93f3dd1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
@@ -68,3 +68,24 @@ if ('oozie-log4j' in config['configurations']):
   log4j_props = config['configurations']['oozie-log4j']
 else:
   log4j_props = None
+
+#hdfs directories
+oozie_hdfs_user_dir = format("/user/{oozie_user}")
+oozie_hdfs_user_mode = 0775
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create an HDFS directory, call params.HdfsDirectory in the service code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
index 83211e1..d6309bf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
@@ -26,6 +26,7 @@ import status_params
 # server configurations
 config = Script.get_config()
 
+hcat_user = config['configurations']['global']['hcat_user']
 webhcat_user = config['configurations']['global']['webhcat_user']
 download_url = config['configurations']['global']['apache_artifacts_download_url']
 
@@ -49,3 +50,27 @@ smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 smokeuser = config['configurations']['global']['smokeuser']
 security_enabled = config['configurations']['global']['security_enabled']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+
+#hdfs directories
+webhcat_apps_dir = "/apps/webhcat"
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create an HDFS directory, call params.HdfsDirectory in the service code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py
index ae12f54..0c75d18 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/webhcat.py
@@ -26,6 +26,23 @@ import sys
 def webhcat():
   import params
 
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(params.webhcat_apps_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=0755
+  )
+  params.HdfsDirectory(None, action="create")
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase.py
index c0f48dc..d899d17 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase.py
@@ -22,10 +22,21 @@ import os
 from resource_management import *
 import sys
 
-def hbase(type=None # 'master' or 'regionserver' or 'client'
+def hbase(name=None # 'master' or 'regionserver' or 'client'
               ):
   import params
-  
+
+  if name in ["master","regionserver"]:
+    params.HdfsDirectory(params.hbase_hdfs_root_dir,
+                         action="create_delayed",
+                         owner=params.hbase_user
+    )
+    params.HdfsDirectory(params.hbase_staging_dir,
+                         action="create_delayed",
+                         owner=params.hbase_user,
+                         mode=0711
+    )
+    params.HdfsDirectory(None, action="create")
   Directory( params.conf_dir,
       owner = params.hbase_user,
       group = params.user_group,
@@ -67,15 +78,15 @@ def hbase(type=None # 'master' or 'regionserver' or 'client'
   hbase_TemplateConfig( 'hbase-env.sh')     
        
   hbase_TemplateConfig( params.metric_prop_file_name,
-    tag = 'GANGLIA-MASTER' if type == 'master' else 'GANGLIA-RS'
+    tag = 'GANGLIA-MASTER' if name == 'master' else 'GANGLIA-RS'
   )
 
   hbase_TemplateConfig( 'regionservers')
 
   if params.security_enabled:
-    hbase_TemplateConfig( format("hbase_{type}_jaas.conf"))
+    hbase_TemplateConfig( format("hbase_{name}_jaas.conf"))
   
-  if type != "client":
+  if name != "client":
     Directory( params.pid_dir,
       owner = params.hbase_user,
       recursive = True

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_client.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_client.py
index 0f2a1bc..01ca548 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_client.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_client.py
@@ -33,7 +33,7 @@ class HbaseClient(Script):
     import params
     env.set_params(params)
     
-    hbase(type='client')
+    hbase(name='client')
 
   def status(self, env):
     raise ClientComponentHasNoStatus()

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py
index 9c78e5c..4b36eb2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_master.py
@@ -29,13 +29,12 @@ from hbase_decommission import hbase_decommission
 class HbaseMaster(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
     
   def configure(self, env):
     import params
     env.set_params(params)
 
-    hbase(type='master')
+    hbase(name='master')
     
   def start(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py
index 2d91e75..49528d3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/hbase_regionserver.py
@@ -28,13 +28,12 @@ from hbase_service import hbase_service
 class HbaseRegionServer(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
     
   def configure(self, env):
     import params
     env.set_params(params)
 
-    hbase(type='regionserver')
+    hbase(name='regionserver')
       
   def start(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
index f94a04f..0422a7c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
@@ -97,3 +97,24 @@ if ('hbase-log4j' in config['configurations']):
   log4j_props = config['configurations']['hbase-log4j']
 else:
   log4j_props = None
+
+
+hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']
+hbase_staging_dir = "/apps/hbase/staging"
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create an HDFS directory, call params.HdfsDirectory in the service code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 8a08a7e..bc1494f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -126,55 +126,26 @@ dfs_domain_socket_dir = os.path.dirname(dfs_domain_socket_path)
 
 hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 
-jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']#"/grid/0/hdfs/journal"
+jn_edits_dir = config['configurations']['hdfs-site']['dfs.journalnode.edits.dir']
 
-# if stack_version[0] == "2":
 dfs_name_dir = config['configurations']['hdfs-site']['dfs.namenode.name.dir']
-# else:
-#   dfs_name_dir = default("/configurations/hdfs-site/dfs.name.dir","/tmp/hadoop-hdfs/dfs/name")
 
 namenode_dirs_created_stub_dir = format("{hdfs_log_dir_prefix}/{hdfs_user}")
 namenode_dirs_stub_filename = "namenode_dirs_created"
 
-hbase_hdfs_root_dir = config['configurations']['hbase-site']['hbase.rootdir']#","/apps/hbase/data")
-hbase_staging_dir = "/apps/hbase/staging"
-hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"] #, "/apps/hive/warehouse")
-webhcat_apps_dir = "/apps/webhcat"
-yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']#","true")
-yarn_nm_app_log_dir =  config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']#","/app-logs")
-mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']#","/app-logs")
-mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']#","/mr-history/done")
-
-if has_oozie_server:
-  oozie_hdfs_user_dir = format("/user/{oozie_user}")
-  oozie_hdfs_user_mode = 775
-if has_hcat_server_host:
-  hcat_hdfs_user_dir = format("/user/{hcat_user}")
-  hcat_hdfs_user_mode = 755
-  webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
-  webhcat_hdfs_user_mode = 755
-if has_hive_server_host:
-  hive_hdfs_user_dir = format("/user/{hive_user}")
-  hive_hdfs_user_mode = 700
 smoke_hdfs_user_dir = format("/user/{smoke_user}")
-smoke_hdfs_user_mode = 770
+smoke_hdfs_user_mode = 0770
 
 namenode_formatted_mark_dir = format("{hadoop_pid_dir_prefix}/hdfs/namenode/formatted/")
 
-# if stack_version[0] == "2":
-fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir'] #","/tmp/hadoop-hdfs/dfs/namesecondary")
-# else:
-#   fs_checkpoint_dir = default("/configurations/core-site/fs.checkpoint.dir","/tmp/hadoop-hdfs/dfs/namesecondary")
-
-# if stack_version[0] == "2":
-dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']#,"/tmp/hadoop-hdfs/dfs/data")
-# else:
-#   dfs_data_dir = default('/configurations/hdfs-site/dfs.data.dir',"/tmp/hadoop-hdfs/dfs/data")
+fs_checkpoint_dir = config['configurations']['hdfs-site']['dfs.namenode.checkpoint.dir']
 
+dfs_data_dir = config['configurations']['hdfs-site']['dfs.datanode.data.dir']
 # HDFS High Availability properties
 dfs_ha_enabled = False
 dfs_ha_nameservices = default("/configurations/hdfs-site/dfs.nameservices", None)
 dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
+namenode_id = None
 if dfs_ha_namenode_ids:
   dfs_ha_namemodes_ids_list = dfs_ha_namenode_ids.split(",")
   dfs_ha_namenode_ids_array_len = len(dfs_ha_namemodes_ids_list)
@@ -185,10 +156,19 @@ if dfs_ha_enabled:
     nn_host = config['configurations']['hdfs-site'][format('dfs.namenode.rpc-address.{dfs_ha_nameservices}.{nn_id}')]
     if hostname in nn_host:
       namenode_id = nn_id
-  namenode_id = None
 
 journalnode_address = default('/configurations/hdfs-site/dfs.journalnode.http-address', None)
 if journalnode_address:
   journalnode_port = journalnode_address.split(":")[1]
 
-falcon_store_uri = config['configurations']['global']['falcon_store_uri']
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
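
One behavioral fix rides along in this hunk: smoke_hdfs_user_mode changes from the decimal literal 770 to the octal literal 0770. The two are different numbers in Python, so the old value produced unintended permission bits. A quick check:

    # decimal 770 is octal 01402, not the intended rwxrwx---
    print(oct(770))    # '0o1402' in Python 3 ('01402' in Python 2)
    print(oct(0o770))  # '0o770'
    assert 770 != 0o770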

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
index 44e6a77..bb1c065 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive.py
@@ -26,6 +26,19 @@ import os
 def hive(name=None):
   import params
 
+  if name == 'hiveserver2':
+
+    params.HdfsDirectory(params.hive_apps_whs_dir,
+                         action="create_delayed",
+                         owner=params.hive_user,
+                         mode=0777
+    )
+    params.HdfsDirectory(params.hive_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hive_user,
+                         mode=params.hive_hdfs_user_mode
+    )
+    params.HdfsDirectory(None, action="create")
   if name == 'metastore' or name == 'hiveserver2':
     hive_config_dir = params.hive_server_conf_dir
     config_file_mode = 0600
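
The create_delayed/create split lets a script queue several directories and then materialize them all with the final HdfsDirectory(None, action="create"), presumably so one kinit and a batched set of hadoop fs calls cover the whole group. A toy accumulate-then-flush sketch of that contract (not the actual provider implementation in hdfs_directory.py):

    _pending = []

    def hdfs_directory(path, action="create", **attrs):
        if path is not None:
            _pending.append((path, attrs))
        if action == "create":
            # flush: the real provider would run the hadoop fs commands here
            for p, a in _pending:
                print("mkdir", p, a)
            del _pending[:]

    hdfs_directory("/apps/hive/warehouse", action="create_delayed", owner="hive", mode=0o777)
    hdfs_directory("/user/hive", action="create_delayed", owner="hive", mode=0o700)
    hdfs_directory(None, action="create")  # both directories created in one pass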

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
index 3ad81a1..b05649c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hive_server.py
@@ -28,7 +28,7 @@ class HiveServer(Script):
 
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
+
   def configure(self, env):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
index 4c2a335..7ed4322 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
@@ -122,7 +122,7 @@ hcat_user = config['configurations']['global']['hcat_user']
 webhcat_user = config['configurations']['global']['webhcat_user']
 
 hcat_pid_dir = status_params.hcat_pid_dir
-hcat_log_dir = config['configurations']['global']['hcat_log_dir']   #hcat_log_dir
+hcat_log_dir = config['configurations']['global']['hcat_log_dir']
 
 hadoop_conf_dir = '/etc/hadoop/conf'
 
@@ -139,3 +139,24 @@ else:
   log4j_exec_props = None
 
 daemon_name = status_params.daemon_name
+
+hive_hdfs_user_dir = format("/user/{hive_user}")
+hive_hdfs_user_mode = 0700
+hive_apps_whs_dir = config['configurations']['hive-site']["hive.metastore.warehouse.dir"]
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
\ No newline at end of file
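
Each params.py resolves kinit_path_local through functions.get_kinit_path, letting an explicitly configured path win over the standard locations. Assuming the helper simply returns the first candidate directory that actually contains a kinit binary, an equivalent sketch is:

    import os

    def get_kinit_path(pathdirs):
        # return the first existing kinit among the candidate directories
        for directory in pathdirs:
            if not directory:
                continue
            candidate = os.path.join(directory, "kinit")
            if os.path.isfile(candidate):
                return candidate
        return "kinit"  # assumed fallback: rely on $PATH resolution

    print(get_kinit_path([None, "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"]))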

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
index 53f7693..2efe535 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/oozie.py
@@ -24,6 +24,13 @@ from resource_management import *
 def oozie(is_server=False # TODO: see if we can remove this
               ):
   import params
+
+  if is_server:
+    params.HdfsDirectory(params.oozie_hdfs_user_dir,
+                         action="create",
+                         owner=params.oozie_user,
+                         mode=params.oozie_hdfs_user_mode
+    )
   #TODO hack for falcon el
   oozie_site = dict(params.config['configurations']['oozie-site'])
   oozie_site["oozie.services.ext"] = 'org.apache.oozie.service.JMSAccessorService,' + oozie_site["oozie.services.ext"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
index 9e45f9d..463dccc 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
@@ -81,3 +81,23 @@ if ('oozie-log4j' in config['configurations']):
   log4j_props = config['configurations']['oozie-log4j']
 else:
   log4j_props = None
+
+oozie_hdfs_user_dir = format("/user/{oozie_user}")
+oozie_hdfs_user_mode = 0775
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
index 08a01a4..ed2aa50 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
@@ -26,6 +26,7 @@ import status_params
 # server configurations
 config = Script.get_config()
 
+hcat_user = config['configurations']['global']['hcat_user']
 webhcat_user = config['configurations']['global']['webhcat_user']
 download_url = config['configurations']['global']['apache_artifacts_download_url']
 
@@ -54,3 +55,26 @@ smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
 smokeuser = config['configurations']['global']['smokeuser']
 security_enabled = config['configurations']['global']['security_enabled']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+
+hcat_hdfs_user_dir = format("/user/{hcat_user}")
+hcat_hdfs_user_mode = 0755
+webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
+webhcat_hdfs_user_mode = 0755
+webhcat_apps_dir = "/apps/webhcat"
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
index c56f1c5..7aa8521 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
@@ -26,6 +26,24 @@ import sys
 def webhcat():
   import params
 
+  params.HdfsDirectory(params.webhcat_apps_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=0755
+  )
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(None, action="create")
+
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,
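
The inequality guard above exists because hcat and webhcat may run as the same user, in which case /user/{hcat_user} and /user/{webhcat_user} are one path and must not be declared twice. In isolation the logic is just:

    def user_dirs(hcat_user, webhcat_user):
        hcat_dir = "/user/%s" % hcat_user
        webhcat_dir = "/user/%s" % webhcat_user
        dirs = []
        if hcat_dir != webhcat_dir:  # skip the duplicate declaration
            dirs.append(hcat_dir)
        dirs.append(webhcat_dir)
        return dirs

    print(user_dirs("hcat", "hcat"))     # ['/user/hcat']
    print(user_dirs("hcat", "webhcat"))  # ['/user/hcat', '/user/webhcat']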

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
index 3b6f8cc..6736d40 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
@@ -28,12 +28,11 @@ from service import service
 class Histroryserver(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
-    yarn()
+    yarn(name="historyserver")
 
   def start(self, env):
     import params
@@ -52,4 +51,4 @@ class Histroryserver(Script):
     check_process_status(status_params.mapred_historyserver_pid_file)
 
 if __name__ == "__main__":
-  Histroryserver().execute()
\ No newline at end of file
+  Histroryserver().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py
index dbeaca0..473f50c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/nodemanager.py
@@ -29,12 +29,11 @@ from service import service
 class Nodemanager(Script):
   def install(self, env):
     self.install_packages(env)
-    self.configure(env)
 
   def configure(self, env):
     import params
     env.set_params(params)
-    yarn()
+    yarn(name="nodemanager")
 
   def start(self, env):
     import params
@@ -58,4 +57,4 @@ class Nodemanager(Script):
     check_process_status(status_params.nodemanager_pid_file)
 
 if __name__ == "__main__":
-  Nodemanager().execute()
\ No newline at end of file
+  Nodemanager().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
index 8a9b3e9..fb8e51a 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
@@ -88,4 +88,28 @@ yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 #exclude file
 exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
 exclude_file_path = config['configurations']['yarn-site']['yarn.resourcemanager.nodes.exclude-path']
-update_exclude_file_only = config['commandParams']['update_exclude_file_only']
+
+
+yarn_log_aggregation_enabled = config['configurations']['yarn-site']['yarn.log-aggregation-enable']
+yarn_nm_app_log_dir =  config['configurations']['yarn-site']['yarn.nodemanager.remote-app-log-dir']
+mapreduce_jobhistory_intermediate_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.intermediate-done-dir']
+mapreduce_jobhistory_done_dir = config['configurations']['mapred-site']['mapreduce.jobhistory.done-dir']
+
+#for create_hdfs_directory
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)
+update_exclude_file_only = config['commandParams']['update_exclude_file_only']
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
index 986356e..2d9cb71 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
@@ -24,9 +24,42 @@ from resource_management import *
 import sys
 
 
-def yarn():
+def yarn(name = None):
   import params
 
+
+  if name in ["nodemanager","historyserver"]:
+    if params.yarn_log_aggregation_enabled:
+      params.HdfsDirectory(params.yarn_nm_app_log_dir,
+                           action="create_delayed",
+                           owner=params.yarn_user,
+                           group=params.user_group,
+                           mode=0777,
+                           recursive_chmod=True
+      )
+    params.HdfsDirectory("/mapred",
+                         action="create_delayed",
+                         owner=params.mapred_user
+    )
+    params.HdfsDirectory("/mapred/system",
+                         action="create_delayed",
+                         owner=params.hdfs_user
+    )
+    params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
+                         action="create_delayed",
+                         owner=params.mapred_user,
+                         group=params.user_group,
+                         mode=0777
+    )
+
+    params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
+                         action="create_delayed",
+                         owner=params.mapred_user,
+                         group=params.user_group,
+                         mode=01777
+    )
+    params.HdfsDirectory(None, action="create")
+
   Directory([params.yarn_pid_dir, params.yarn_log_dir],
             owner=params.yarn_user,
             group=params.user_group,
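
Among the modes above, mapreduce_jobhistory_done_dir gets 01777 rather than 0777: the leading 1 is the sticky bit, which on HDFS (as on /tmp) lets every user write into the shared directory while only a file's owner can delete or move it. The bit arithmetic:

    import stat

    mode = 0o1777
    assert mode & stat.S_ISVTX           # sticky bit is set
    assert stat.S_IMODE(mode) == 0o1777
    print(oct(mode & ~stat.S_ISVTX))     # '0o777', the plain permission bits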

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py
index 4df7f4e..da613af 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/falcon.py
@@ -29,6 +29,12 @@ def falcon(type, action = None):
            mode=0644)
   elif type == 'server':
     if action == 'config':
+      if params.store_uri[0:4] == "hdfs":
+        params.HdfsDirectory(params.store_uri,
+                             action="create",
+                             owner=params.falcon_user,
+                             mode=0755
+        )
       Directory(params.falcon_local_dir,
                 owner=params.falcon_user,
                 recursive=True
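
The store_uri[0:4] == "hdfs" prefix test restricts directory creation to HDFS store URIs (a local file: store needs no HdfsDirectory). A stricter scheme check, shown here only as an alternative sketch, would parse the URI instead of slicing it:

    try:
        from urllib.parse import urlparse  # Python 3
    except ImportError:
        from urlparse import urlparse      # Python 2

    def is_hdfs_uri(uri):
        return urlparse(uri).scheme == "hdfs"

    print(is_hdfs_uri("hdfs://nn1:8020/apps/falcon/store"))  # True
    print(is_hdfs_uri("file:///grid/falcon/store"))          # False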

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py
index 7f27862..88499ba 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.1.1/services/FALCON/package/scripts/params.py
@@ -34,3 +34,22 @@ falcon_data_dir = format('{falcon_local_dir}/activemq')
 store_uri = config['configurations']['global']['falcon_store_uri']
 falcon_host = config['clusterHostInfo']['falcon_server_hosts'][0]
 falcon_port = config['configurations']['global']['falcon_port']
+
+#for create_hdfs_directory
+security_enabled = config['configurations']['global']['security_enabled']
+hostname = config["hostname"]
+hadoop_conf_dir = "/etc/hadoop/conf"
+hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
+hdfs_user = config['configurations']['global']['hdfs_user']
+kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+import functools
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local
+)

http://git-wip-us.apache.org/repos/asf/ambari/blob/fa47490b/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py
index c3789a2..1d1baca 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HBASE/test_hbase_master.py
@@ -164,11 +164,42 @@ class TestHBaseMaster(RMFTestCase):
     self.assertNoMoreResources()
 
   def assert_configure_default(self):
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True,
-    )   
+    )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
@@ -200,11 +231,7 @@ class TestHBaseMaster(RMFTestCase):
     self.assertResourceCalled('Directory', '/var/run/hbase',
       owner = 'hbase',
       recursive = True,
-    )   
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )    
+    )
     self.assertResourceCalled('Directory', '/var/log/hbase',
       owner = 'hbase',
       recursive = True,
@@ -219,11 +246,42 @@ class TestHBaseMaster(RMFTestCase):
     )
   
   def assert_configure_secured(self):
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              )
     self.assertResourceCalled('Directory', '/etc/hbase/conf',
       owner = 'hbase',
       group = 'hadoop',
       recursive = True,
     )
+    self.assertResourceCalled('Directory', '/hadoop/hbase',
+      owner = 'hbase',
+      recursive = True,
+    )
     self.assertResourceCalled('XmlConfig', 'hbase-site.xml',
       owner = 'hbase',
       group = 'hadoop',
@@ -260,10 +318,6 @@ class TestHBaseMaster(RMFTestCase):
       owner = 'hbase',
       recursive = True,
     )
-    self.assertResourceCalled('Directory', '/hadoop/hbase',
-      owner = 'hbase',
-      recursive = True,
-    )
     self.assertResourceCalled('Directory', '/var/log/hbase',
       owner = 'hbase',
       recursive = True,