Posted to commits@ambari.apache.org by jl...@apache.org on 2017/10/09 06:01:07 UTC

[09/50] [abbrv] ambari git commit: AMBARI-22095 Make hooks stack agnostic (dsen)

AMBARI-22095 Make hooks stack agnostic (dsen)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/5b36cdfd
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/5b36cdfd
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/5b36cdfd

Branch: refs/heads/branch-feature-AMBARI-14714
Commit: 5b36cdfd87b756eba922dfd1ac5419552f4d375f
Parents: 05c7067
Author: Dmytro Sen <ds...@apache.org>
Authored: Tue Oct 3 15:45:22 2017 +0300
Committer: Dmytro Sen <ds...@apache.org>
Committed: Tue Oct 3 15:45:22 2017 +0300

----------------------------------------------------------------------
 ambari-agent/pom.xml                            |   4 +-
 .../src/main/python/ambari_agent/FileCache.py   |   5 +-
 .../test/python/ambari_agent/TestFileCache.py   |   4 +-
 ambari-server/pom.xml                           |   2 +
 ambari-server/src/main/assemblies/server.xml    |  20 +-
 .../actionmanager/ExecutionCommandWrapper.java  |   3 +-
 .../ambari/server/agent/HeartbeatMonitor.java   |   3 +-
 .../internal/UpgradeResourceProvider.java       |   3 +-
 .../ambari/server/stack/StackDirectory.java     |  29 +-
 .../ambari/server/stack/StackManager.java       |   5 +
 .../apache/ambari/server/stack/StackModule.java |   5 -
 .../apache/ambari/server/state/StackInfo.java   |   8 -
 .../python/ambari_server/resourceFilesKeeper.py |   7 +-
 .../python/ambari_server/serverConfiguration.py |   2 +-
 .../main/python/ambari_server/setupMpacks.py    |   2 +-
 .../main/resources/scripts/Ambaripreupload.py   |   4 +-
 .../scripts/post-user-creation-hook.sh          |   2 +-
 .../stack-hooks/after-INSTALL/scripts/hook.py   |  37 ++
 .../stack-hooks/after-INSTALL/scripts/params.py | 108 ++++++
 .../scripts/shared_initialization.py            | 132 +++++++
 .../before-ANY/files/changeToSecureUid.sh       |  64 ++++
 .../stack-hooks/before-ANY/scripts/hook.py      |  36 ++
 .../stack-hooks/before-ANY/scripts/params.py    | 254 +++++++++++++
 .../before-ANY/scripts/shared_initialization.py | 273 +++++++++++++
 .../stack-hooks/before-INSTALL/scripts/hook.py  |  37 ++
 .../before-INSTALL/scripts/params.py            | 115 ++++++
 .../scripts/repo_initialization.py              |  75 ++++
 .../scripts/shared_initialization.py            |  37 ++
 .../stack-hooks/before-RESTART/scripts/hook.py  |  29 ++
 .../before-START/files/checkForFormat.sh        |  65 ++++
 .../before-START/files/fast-hdfs-resource.jar   | Bin 0 -> 28296600 bytes
 .../before-START/files/task-log4j.properties    | 134 +++++++
 .../before-START/files/topology_script.py       |  66 ++++
 .../before-START/scripts/custom_extensions.py   | 173 +++++++++
 .../stack-hooks/before-START/scripts/hook.py    |  43 +++
 .../stack-hooks/before-START/scripts/params.py  | 380 +++++++++++++++++++
 .../before-START/scripts/rack_awareness.py      |  48 +++
 .../scripts/shared_initialization.py            | 256 +++++++++++++
 .../templates/commons-logging.properties.j2     |  43 +++
 .../templates/exclude_hosts_list.j2             |  21 +
 .../templates/hadoop-metrics2.properties.j2     | 107 ++++++
 .../before-START/templates/health_check.j2      |  81 ++++
 .../templates/include_hosts_list.j2             |  21 +
 .../templates/topology_mappings.data.j2         |  24 ++
 .../2.0.6/hooks/after-INSTALL/scripts/hook.py   |  37 --
 .../2.0.6/hooks/after-INSTALL/scripts/params.py | 108 ------
 .../scripts/shared_initialization.py            | 132 -------
 .../hooks/before-ANY/files/changeToSecureUid.sh |  64 ----
 .../HDP/2.0.6/hooks/before-ANY/scripts/hook.py  |  36 --
 .../2.0.6/hooks/before-ANY/scripts/params.py    | 254 -------------
 .../before-ANY/scripts/shared_initialization.py | 273 -------------
 .../2.0.6/hooks/before-INSTALL/scripts/hook.py  |  37 --
 .../hooks/before-INSTALL/scripts/params.py      | 115 ------
 .../scripts/repo_initialization.py              |  75 ----
 .../scripts/shared_initialization.py            |  37 --
 .../2.0.6/hooks/before-RESTART/scripts/hook.py  |  29 --
 .../hooks/before-START/files/checkForFormat.sh  |  65 ----
 .../before-START/files/fast-hdfs-resource.jar   | Bin 28296600 -> 0 bytes
 .../before-START/files/task-log4j.properties    | 134 -------
 .../hooks/before-START/files/topology_script.py |  66 ----
 .../before-START/scripts/custom_extensions.py   | 173 ---------
 .../2.0.6/hooks/before-START/scripts/hook.py    |  43 ---
 .../2.0.6/hooks/before-START/scripts/params.py  | 380 -------------------
 .../before-START/scripts/rack_awareness.py      |  48 ---
 .../scripts/shared_initialization.py            | 256 -------------
 .../templates/commons-logging.properties.j2     |  43 ---
 .../templates/exclude_hosts_list.j2             |  21 -
 .../templates/hadoop-metrics2.properties.j2     | 107 ------
 .../before-START/templates/health_check.j2      |  81 ----
 .../templates/include_hosts_list.j2             |  21 -
 .../templates/topology_mappings.data.j2         |  24 --
 .../services/ECS/package/scripts/ecs_client.py  |   2 +-
 .../HDP/3.0/hooks/after-INSTALL/scripts/hook.py |  37 --
 .../3.0/hooks/after-INSTALL/scripts/params.py   | 109 ------
 .../scripts/shared_initialization.py            | 140 -------
 .../hooks/before-ANY/files/changeToSecureUid.sh |  53 ---
 .../HDP/3.0/hooks/before-ANY/scripts/hook.py    |  36 --
 .../HDP/3.0/hooks/before-ANY/scripts/params.py  | 254 -------------
 .../before-ANY/scripts/shared_initialization.py | 239 ------------
 .../3.0/hooks/before-INSTALL/scripts/hook.py    |  37 --
 .../3.0/hooks/before-INSTALL/scripts/params.py  | 115 ------
 .../scripts/repo_initialization.py              |  76 ----
 .../scripts/shared_initialization.py            |  37 --
 .../3.0/hooks/before-RESTART/scripts/hook.py    |  29 --
 .../hooks/before-START/files/checkForFormat.sh  |  65 ----
 .../before-START/files/fast-hdfs-resource.jar   | Bin 28296600 -> 0 bytes
 .../before-START/files/task-log4j.properties    | 134 -------
 .../hooks/before-START/files/topology_script.py |  66 ----
 .../HDP/3.0/hooks/before-START/scripts/hook.py  |  40 --
 .../3.0/hooks/before-START/scripts/params.py    | 364 ------------------
 .../before-START/scripts/rack_awareness.py      |  47 ---
 .../scripts/shared_initialization.py            | 249 ------------
 .../templates/commons-logging.properties.j2     |  43 ---
 .../templates/exclude_hosts_list.j2             |  21 -
 .../templates/hadoop-metrics2.properties.j2     | 107 ------
 .../before-START/templates/health_check.j2      |  81 ----
 .../templates/include_hosts_list.j2             |  21 -
 .../templates/topology_mappings.data.j2         |  24 --
 .../server/api/services/AmbariMetaInfoTest.java |  19 -
 .../src/test/python/TestResourceFilesKeeper.py  |   1 +
 .../hooks/after-INSTALL/test_after_install.py   |  26 +-
 .../2.0.6/hooks/before-ANY/test_before_any.py   |   5 +-
 .../hooks/before-INSTALL/test_before_install.py |  14 +-
 .../hooks/before-START/test_before_start.py     |  21 +-
 .../src/test/python/stacks/utils/RMFTestCase.py |   8 +
 .../src/main/assemblies/hdf-ambari-mpack.xml    |   1 +
 106 files changed, 2766 insertions(+), 5181 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index 8673f2e..eede374 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -318,8 +318,7 @@
                     <include>/cred/lib/*.jar</include>
                     <include>/tools/*.jar</include>
                     <include>/cache/stacks/HDP/2.1.GlusterFS/services/STORM/package/files/wordCount.jar</include>
-                    <include>/cache/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar</include>
-                    <include>/cache/stacks/HDP/3.0/hooks/before-START/files/fast-hdfs-resource.jar</include>
+                    <include>/cache/stack-hooks/before-START/files/fast-hdfs-resource.jar</include>
                     <include>/cache/common-services/STORM/0.9.1/package/files/wordCount.jar</include>
                   </includes>
                 </source>
@@ -487,6 +486,7 @@
                   <directory>${resourcesFolder}</directory>
                   <includes>
                     <include>common-services/**</include>
+                    <include>stack-hooks/**</include>
                     <include>stacks/stack_advisor.py</include>
                     <include>stacks/${stack.distribution}/**/*</include>
                   </includes>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-agent/src/main/python/ambari_agent/FileCache.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/FileCache.py b/ambari-agent/src/main/python/ambari_agent/FileCache.py
index d0c8bdb..28912d1 100644
--- a/ambari-agent/src/main/python/ambari_agent/FileCache.py
+++ b/ambari-agent/src/main/python/ambari_agent/FileCache.py
@@ -83,11 +83,10 @@ class FileCache():
     Returns a base directory for hooks
     """
     try:
-      hooks_subpath = command['commandParams']['hooks_folder']
+      hooks_path = command['commandParams']['hooks_folder']
     except KeyError:
       return None
-    subpath = os.path.join(self.STACKS_CACHE_DIRECTORY, hooks_subpath)
-    return self.provide_directory(self.cache_dir, subpath,
+    return self.provide_directory(self.cache_dir, hooks_path,
                                   server_url_prefix)
 
 

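The agent-side effect of the FileCache change above, as a minimal sketch (the cache_dir value is illustrative): hooks_folder is now taken as a path relative to the cache root itself instead of being joined under the stacks/ subdirectory.

    import os

    STACKS_CACHE_DIRECTORY = "stacks"

    def old_hooks_base(cache_dir, hooks_subpath):
        # pre-patch: hooks lived under the per-stack cache
        return os.path.join(cache_dir, STACKS_CACHE_DIRECTORY, hooks_subpath)

    def new_hooks_base(cache_dir, hooks_path):
        # post-patch: hooks_folder already names a cache-root subdirectory
        return os.path.join(cache_dir, hooks_path)

    assert old_hooks_base("/var/lib/ambari-agent/cache", "HDP/2.0.6/hooks") == \
        "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/hooks"
    assert new_hooks_base("/var/lib/ambari-agent/cache", "stack-hooks") == \
        "/var/lib/ambari-agent/cache/stack-hooks"
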
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestFileCache.py b/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
index 00f6b69..68cc8d9 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
@@ -93,7 +93,7 @@ class TestFileCache(TestCase):
     # Check existing dir case
     command = {
       'commandParams' : {
-        'hooks_folder' : os.path.join('HDP', '2.1.1', 'hooks')
+        'hooks_folder' : 'stack-hooks'
       }
     }
     provide_directory_mock.return_value = "dummy value"
@@ -103,7 +103,7 @@ class TestFileCache(TestCase):
       pprint.pformat(provide_directory_mock.call_args_list[0][0]),
       "('/var/lib/ambari-agent/cache', "
       "{0}, "
-      "'server_url_pref')".format(pprint.pformat(os.path.join('stacks','HDP', '2.1.1', 'hooks'))))
+      "'server_url_pref')".format(pprint.pformat('stack-hooks')))
     self.assertEquals(res, "dummy value")
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 3d22044..e250da7 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -44,6 +44,7 @@
     <customActionsRoot>src/main/resources/custom_actions</customActionsRoot>
     <ambariProperties>conf/unix/ambari.properties</ambariProperties>
     <commonServicesSrcLocation>src/main/resources/common-services</commonServicesSrcLocation>
+    <stackHooksLocation>src/main/resources/stack-hooks</stackHooksLocation>
     <stacksSrcLocation>src/main/resources/stacks/${stack.distribution}</stacksSrcLocation>
     <tarballResourcesFolder>src/main/resources</tarballResourcesFolder>
     <skipPythonTests>false</skipPythonTests>
@@ -1067,6 +1068,7 @@
         <ambariProperties>target/pluggable-stack-definition/conf/unix/ambari.properties</ambariProperties>
         <resourceManagementSrcLocation>target/pluggable-stack-definition/python/resource_management</resourceManagementSrcLocation>
         <commonServicesSrcLocation>target/pluggable-stack-definition/common-services</commonServicesSrcLocation>
+        <stackHooksLocation>target/pluggable-stack-definition/stack-hooks</stackHooksLocation>
         <stacksSrcLocation>target/pluggable-stack-definition/stacks/${stack.distribution}</stacksSrcLocation>
         <resourcesSrcLocation>src/main/resources</resourcesSrcLocation>
         <tarballResourcesFolder>target/pluggable-stack-definition</tarballResourcesFolder>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/assemblies/server.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/assemblies/server.xml b/ambari-server/src/main/assemblies/server.xml
index c26a769..3079d1b 100644
--- a/ambari-server/src/main/assemblies/server.xml
+++ b/ambari-server/src/main/assemblies/server.xml
@@ -167,6 +167,22 @@
     </fileSet>
     <fileSet>
       <fileMode>755</fileMode>
+      <directory>${stackHooksLocation}</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/stack-hooks</outputDirectory>
+      <excludes>
+	    <exclude>/before-START/files/fast-hdfs-resource.jar</exclude>
+	  </excludes>
+    </fileSet>
+    <fileSet>
+      <fileMode>644</fileMode>
+      <directory>${stackHooksLocation}</directory>
+      <outputDirectory>/var/lib/ambari-server/resources/stack-hooks</outputDirectory>
+      <includes>
+	    <include>/before-START/files/fast-hdfs-resource.jar</include>
+	  </includes>
+    </fileSet>
+    <fileSet>
+      <fileMode>755</fileMode>
       <directory>src/main/resources/upgrade/catalog</directory>
       <outputDirectory>/var/lib/ambari-server/resources/upgrade/catalog</outputDirectory>
     </fileSet>
@@ -176,8 +192,6 @@
       <outputDirectory>/var/lib/ambari-server/resources/stacks/${stack.distribution}</outputDirectory>
       <excludes>
 	    <exclude>/2.1.GlusterFS/services/STORM/package/files/wordCount.jar</exclude>
-        <exclude>/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar</exclude>
-        <exclude>/3.0/hooks/before-START/files/fast-hdfs-resource.jar</exclude>
 	  </excludes>
     </fileSet>
     <fileSet>
@@ -186,8 +200,6 @@
       <outputDirectory>/var/lib/ambari-server/resources/stacks/${stack.distribution}</outputDirectory>
       <includes>
 	    <include>/2.1.GlusterFS/services/STORM/package/files/wordCount.jar</include>
-        <include>/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar</include>
-        <include>/3.0/hooks/before-START/files/fast-hdfs-resource.jar</include>
 	  </includes>
     </fileSet>
     <fileSet>

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
index 47aa093..ff13d0b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/actionmanager/ExecutionCommandWrapper.java
@@ -20,6 +20,7 @@ package org.apache.ambari.server.actionmanager;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.VERSION;
+import static org.apache.ambari.server.stack.StackManager.DEFAULT_HOOKS_FOLDER;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -268,7 +269,7 @@ public class ExecutionCommandWrapper {
           stackId.getStackVersion());
 
         if (!commandParams.containsKey(HOOKS_FOLDER)) {
-          commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
+          commandParams.put(HOOKS_FOLDER, DEFAULT_HOOKS_FOLDER);
         }
 
         if (!commandParams.containsKey(SERVICE_PACKAGE_FOLDER)) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
index d83a5d1..c13df6b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
@@ -25,6 +25,7 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SCRIPT_TY
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_NAME;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.STACK_VERSION;
+import static org.apache.ambari.server.stack.StackManager.DEFAULT_HOOKS_FOLDER;
 
 import java.util.ArrayList;
 import java.util.Collection;
@@ -340,7 +341,7 @@ public class HeartbeatMonitor implements Runnable {
     commandParams.put(COMMAND_TIMEOUT, commandTimeout);
     commandParams.put(SERVICE_PACKAGE_FOLDER,
        serviceInfo.getServicePackageFolder());
-    commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
+    commandParams.put(HOOKS_FOLDER, DEFAULT_HOOKS_FOLDER);
     // Fill host level params
     Map<String, String> hostLevelParams = statusCmd.getHostLevelParams();
     hostLevelParams.put(JDK_LOCATION, ambariManagementController.getJdkResourceUrl());

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
index 52f66bc..33ce25e 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UpgradeResourceProvider.java
@@ -19,6 +19,7 @@ package org.apache.ambari.server.controller.internal;
 
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
 import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.SERVICE_PACKAGE_FOLDER;
+import static org.apache.ambari.server.stack.StackManager.DEFAULT_HOOKS_FOLDER;
 
 import java.text.MessageFormat;
 import java.util.ArrayList;
@@ -907,7 +908,7 @@ public class UpgradeResourceProvider extends AbstractControllerResourceProvider
           effectiveStackId.getStackVersion(), serviceName);
 
       commandParams.put(SERVICE_PACKAGE_FOLDER, serviceInfo.getServicePackageFolder());
-      commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
+      commandParams.put(HOOKS_FOLDER, DEFAULT_HOOKS_FOLDER);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
index 0e59c95..9259466 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackDirectory.java
@@ -72,10 +72,6 @@ public class StackDirectory extends StackDefinitionDirectory {
    * Filename for theme file at service layer
    */
   public static final String SERVICE_THEME_FILE_NAME = "theme.json";
-  /**
-   * hooks directory path
-   */
-  private String hooksDir;
 
   /**
    * upgrades directory path
@@ -142,15 +138,10 @@ public class StackDirectory extends StackDefinitionDirectory {
    */
   ModuleFileUnmarshaller unmarshaller = new ModuleFileUnmarshaller();
 
-  /**
-   * name of the hooks directory
-   */
-  public static final String HOOKS_FOLDER_NAME = "hooks";
   public static final FilenameFilter FILENAME_FILTER = new FilenameFilter() {
     @Override
     public boolean accept(File dir, String s) {
-      return !(s.equals(".svn") || s.equals(".git") ||
-          s.equals(HOOKS_FOLDER_NAME));
+      return !(s.equals(".svn") || s.equals(".git"));
     }
   };
 
@@ -206,15 +197,6 @@ public class StackDirectory extends StackDefinitionDirectory {
   }
 
   /**
-   * Obtain the hooks directory path.
-   *
-   * @return hooks directory path
-   */
-  public String getHooksDir() {
-    return hooksDir;
-  }
-
-  /**
    * Obtain the upgrades directory path.
    *
    * @return upgrades directory path
@@ -327,20 +309,11 @@ public class StackDirectory extends StackDefinitionDirectory {
    */
   private void parsePath() throws AmbariException {
     Collection<String> subDirs = Arrays.asList(directory.list());
-    if (subDirs.contains(HOOKS_FOLDER_NAME)) {
-      // hooksDir is expected to be relative to stack root
-      hooksDir = getStackDirName() + File.separator + getName() +
-          File.separator + HOOKS_FOLDER_NAME;
-    } else {
-      LOG.debug("Hooks folder {}{}" + HOOKS_FOLDER_NAME + " does not exist", getAbsolutePath(), File.separator);
-    }
-
     if (subDirs.contains(RCO_FILE_NAME)) {
       // rcoFile is expected to be absolute
       rcoFilePath = getAbsolutePath() + File.separator + RCO_FILE_NAME;
     }
 
-
     if (subDirs.contains(KERBEROS_DESCRIPTOR_FILE_NAME)) {
       // kerberosDescriptorFilePath is expected to be absolute
       kerberosDescriptorFilePath = getAbsolutePath() + File.separator + KERBEROS_DESCRIPTOR_FILE_NAME;

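With hooks removed from stack definitions, StackDirectory no longer discovers a per-stack hooks directory, and the filename filter above only needs to skip VCS metadata. An illustrative Python analogue of the new FILENAME_FILTER:

    def accept(name):
        # post-patch: "hooks" is no longer excluded, because stack
        # directories no longer contain one
        return name not in (".svn", ".git")
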
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
index b11ecab..eb6737a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackManager.java
@@ -79,6 +79,11 @@ public class StackManager {
   public static final String COMMON_SERVICES = "common-services";
 
   /**
+   * Prefix used for common stack hooks parent path string
+   */
+  public static final String DEFAULT_HOOKS_FOLDER = "stack-hooks";
+
+  /**
    * Prefix used for extension services parent path string
    */
   public static final String EXTENSIONS = "extensions";

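DEFAULT_HOOKS_FOLDER replaces the per-stack StackInfo.getStackHooksFolder() lookup at the three call sites earlier in this commit (ExecutionCommandWrapper, HeartbeatMonitor, UpgradeResourceProvider). A rough Python sketch of the resulting behavior (the function name is hypothetical):

    DEFAULT_HOOKS_FOLDER = "stack-hooks"

    def fill_hooks_folder(command_params):
        # every command now advertises the shared stack-hooks directory
        # unless a hooks_folder was already set
        command_params.setdefault("hooks_folder", DEFAULT_HOOKS_FOLDER)
        return command_params

    print(fill_hooks_folder({}))  # {'hooks_folder': 'stack-hooks'}
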
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
index 6dc2b93..742706d 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
@@ -284,10 +284,6 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
     mergeConfigurations(parentStack, allStacks, commonServices, extensions);
     mergeRoleCommandOrder(parentStack);
 
-    if (stackInfo.getStackHooksFolder() == null) {
-      stackInfo.setStackHooksFolder(parentStack.getModuleInfo().getStackHooksFolder());
-    }
-
     // grab stack level kerberos.json from parent stack
     if (stackInfo.getKerberosDescriptorFileLocation() == null) {
       stackInfo.setKerberosDescriptorFileLocation(parentStack.getModuleInfo().getKerberosDescriptorFileLocation());
@@ -574,7 +570,6 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
       stackInfo.setMinUpgradeVersion(smx.getVersion().getUpgrade());
       stackInfo.setActive(smx.getVersion().isActive());
       stackInfo.setParentStackVersion(smx.getExtends());
-      stackInfo.setStackHooksFolder(stackDirectory.getHooksDir());
       stackInfo.setRcoFileLocation(stackDirectory.getRcoFilePath());
       stackInfo.setKerberosDescriptorFileLocation(stackDirectory.getKerberosDescriptorFilePath());
       stackInfo.setKerberosDescriptorPreConfigurationFileLocation(stackDirectory.getKerberosDescriptorPreconfigureFilePath());

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
index a3886ab..dcf850f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
@@ -437,14 +437,6 @@ public class StackInfo implements Comparable<StackInfo>, Validable {
     this.widgetsDescriptorFileLocation = widgetsDescriptorFileLocation;
   }
 
-  public String getStackHooksFolder() {
-    return stackHooksFolder;
-  }
-
-  public void setStackHooksFolder(String stackHooksFolder) {
-    this.stackHooksFolder = stackHooksFolder;
-  }
-
   /**
    * Set the path of the stack upgrade directory.
    *

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/python/ambari_server/resourceFilesKeeper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/resourceFilesKeeper.py b/ambari-server/src/main/python/ambari_server/resourceFilesKeeper.py
index b41c400..06d16fa 100644
--- a/ambari-server/src/main/python/ambari_server/resourceFilesKeeper.py
+++ b/ambari-server/src/main/python/ambari_server/resourceFilesKeeper.py
@@ -33,7 +33,7 @@ class ResourceFilesKeeper():
   This class encapsulates all utility methods for resource files maintenance.
   """
 
-  HOOKS_DIR="hooks"
+  STACK_HOOKS_DIR="stack-hooks"
   PACKAGE_DIR="package"
   STACKS_DIR="stacks"
   COMMON_SERVICES_DIR="common-services"
@@ -43,7 +43,7 @@ class ResourceFilesKeeper():
   EXTENSIONS_DIR="extensions"
 
   # For these directories archives are created
-  ARCHIVABLE_DIRS = [HOOKS_DIR, PACKAGE_DIR]
+  ARCHIVABLE_DIRS = [PACKAGE_DIR]
 
   HASH_SUM_FILE=".hash"
   ARCHIVE_NAME="archive.zip"
@@ -116,6 +116,9 @@ class ResourceFilesKeeper():
     # Iterate over extension directories
     self._iter_update_directory_archive(valid_extensions)
 
+    # stack hooks
+    self._update_resources_subdir_archive(self.STACK_HOOKS_DIR)
+
     # custom actions
     self._update_resources_subdir_archive(self.CUSTOM_ACTIONS_DIR)
 

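ResourceFilesKeeper consequently builds a single archive for the shared hooks instead of one per stack version. An illustrative sketch of the resulting layout, assuming the archive naming shown above (archive.zip, .hash):

    # pre-patch: one hooks archive per stack version, e.g.
    #   resources/stacks/HDP/2.0.6/hooks/archive.zip
    #   resources/stacks/HDP/3.0/hooks/archive.zip
    # post-patch: a single shared archive
    #   resources/stack-hooks/archive.zip
    ARCHIVABLE_DIRS = ["package"]  # "hooks" dropped; stack-hooks is handled
                                   # via _update_resources_subdir_archive
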
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/python/ambari_server/serverConfiguration.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/serverConfiguration.py b/ambari-server/src/main/python/ambari_server/serverConfiguration.py
index bd64b0e..df89f79 100644
--- a/ambari-server/src/main/python/ambari_server/serverConfiguration.py
+++ b/ambari-server/src/main/python/ambari_server/serverConfiguration.py
@@ -562,7 +562,7 @@ class ServerConfigDefaultsLinux(ServerConfigDefaults):
       (AmbariPath.get("/var/lib/ambari-server/data/cache/"), "700", "{0}", False),
       (AmbariPath.get("/var/lib/ambari-server/resources/common-services/STORM/0.9.1/package/files/wordCount.jar"), "644", "{0}", False),
       (AmbariPath.get("/var/lib/ambari-server/resources/stacks/HDP/2.1.GlusterFS/services/STORM/package/files/wordCount.jar"), "644", "{0}", False),
-      (AmbariPath.get("/var/lib/ambari-server/resources/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar"), "644", "{0}", False),
+      (AmbariPath.get("/var/lib/ambari-server/resources/stack-hooks/before-START/files/fast-hdfs-resource.jar"), "644", "{0}", False),
       (AmbariPath.get("/var/lib/ambari-server/resources/stacks/HDP/2.1/services/SMARTSENSE/package/files/view/smartsense-ambari-view-1.4.0.0.60.jar"), "644", "{0}", False),
       (AmbariPath.get("/var/lib/ambari-server/resources/stacks/HDP/3.0/hooks/before-START/files/fast-hdfs-resource.jar"), "644", "{0}", False),
       # Also, /etc/ambari-server/conf/password.dat

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/python/ambari_server/setupMpacks.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/setupMpacks.py b/ambari-server/src/main/python/ambari_server/setupMpacks.py
index 6f232f4..7a9b2b8 100755
--- a/ambari-server/src/main/python/ambari_server/setupMpacks.py
+++ b/ambari-server/src/main/python/ambari_server/setupMpacks.py
@@ -818,7 +818,7 @@ def _install_mpack(options, replay_mode=False, is_upgrade=False):
 
   print_info_msg("Management pack {0}-{1} successfully installed! Please restart ambari-server.".format(mpack_name, mpack_version))
   return mpack_metadata, mpack_name, mpack_version, mpack_staging_dir, mpack_archive_path
-
+# TODO
 def _execute_hook(mpack_metadata, hook_name, base_dir):
   if "hooks" in mpack_metadata:
     hooks = mpack_metadata["hooks"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/scripts/Ambaripreupload.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index 132a5b9..a8665b1 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -439,7 +439,7 @@ with Environment() as env:
   # jar shouldn't be used before (read comment below)
   File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
        mode=0644,
-       content=StaticFile("/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar")
+       content=StaticFile("/var/lib/ambari-agent/cache/stack-hooks/before-START/files/fast-hdfs-resource.jar")
   )
   # Create everything in one jar call (this is fast).
   # (! Before everything should be executed with action="create_on_execute/delete_on_execute" for this time-optimization to work)
@@ -460,4 +460,4 @@ with Environment() as env:
       sudo = True
     )
 
-  print "Ambari preupload script completed."
\ No newline at end of file
+  print "Ambari preupload script completed."

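fast-hdfs-resource.jar moves with the hooks; Ambaripreupload.py above and post-user-creation-hook.sh below now point at the shared location. A hypothetical helper showing the relocation pattern applied throughout this commit:

    import re

    def relocate_hooks_path(path):
        # strip the stack-specific hooks prefix in favor of stack-hooks
        return re.sub(r"stacks/HDP/[^/]+/hooks", "stack-hooks", path)

    assert relocate_hooks_path(
        "/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar"
    ) == "/var/lib/ambari-agent/cache/stack-hooks/before-START/files/fast-hdfs-resource.jar"
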
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/scripts/post-user-creation-hook.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/post-user-creation-hook.sh b/ambari-server/src/main/resources/scripts/post-user-creation-hook.sh
index d85741b..5318fc5 100755
--- a/ambari-server/src/main/resources/scripts/post-user-creation-hook.sh
+++ b/ambari-server/src/main/resources/scripts/post-user-creation-hook.sh
@@ -135,7 +135,7 @@ check_tools
 prepare_input
 
 # the default implementation creates user home folders; the first argument must be the username
-ambari_sudo "yarn jar /var/lib/ambari-server/resources/stacks/HDP/2.0.6/hooks/before-START/files/fast-hdfs-resource.jar $JSON_INPUT"
+ambari_sudo "yarn jar /var/lib/ambari-server/resources/stack-hooks/before-START/files/fast-hdfs-resource.jar $JSON_INPUT"
 
 if [ "$DEBUG" -gt "0" ]; then echo "Switch debug OFF";set -x;unset DEBUG; else echo "debug: OFF"; fi
 unset DEBUG

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/hook.py
new file mode 100644
index 0000000..8bae9e6
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/hook.py
@@ -0,0 +1,37 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management.libraries.script.hook import Hook
+from shared_initialization import link_configs
+from shared_initialization import setup_config
+from shared_initialization import setup_stack_symlinks
+
+class AfterInstallHook(Hook):
+
+  def hook(self, env):
+    import params
+
+    env.set_params(params)
+    setup_stack_symlinks(self.stroutfile)
+    setup_config()
+
+    link_configs(self.stroutfile)
+
+if __name__ == "__main__":
+  AfterInstallHook().execute()

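The hook scripts keep the <prefix>-<COMMAND> directory naming from the old per-stack layout (before-ANY, before-INSTALL, before-START, before-RESTART, after-INSTALL, per the diffstat above); only the base directory changes. A hypothetical sketch of how a hook directory could be derived for a command:

    def hook_dir(prefix, command, hooks_base="stack-hooks"):
        # e.g. hook_dir("before", "START") -> "stack-hooks/before-START"
        return "%s/%s-%s" % (hooks_base, prefix, command)
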
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
new file mode 100644
index 0000000..bf9d79b
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
@@ -0,0 +1,108 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+
+from ambari_commons.constants import AMBARI_SUDO_BINARY
+from resource_management.libraries.script import Script
+from resource_management.libraries.script.script import get_config_lock_file
+from resource_management.libraries.functions import default
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import format_jvm_option
+from resource_management.libraries.functions.version import format_stack_version, get_major_version
+from string import lower
+
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+dfs_type = default("/commandParams/dfs_type", "")
+
+is_parallel_execution_enabled = int(default("/agentConfigParams/agent/parallel_execution", 0)) == 1
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+sudo = AMBARI_SUDO_BINARY
+
+stack_version_unformatted = config['hostLevelParams']['stack_version']
+stack_version_formatted = format_stack_version(stack_version_unformatted)
+major_stack_version = get_major_version(stack_version_formatted)
+
+# service name
+service_name = config['serviceName']
+
+# logsearch configuration
+logsearch_logfeeder_conf = "/etc/ambari-logsearch-logfeeder/conf"
+
+agent_cache_dir = config['hostLevelParams']['agentCacheDir']
+service_package_folder = config['commandParams']['service_package_folder']
+logsearch_service_name = service_name.lower().replace("_", "-")
+logsearch_config_file_name = 'input.config-' + logsearch_service_name + ".json"
+logsearch_config_file_path = agent_cache_dir + "/" + service_package_folder + "/templates/" + logsearch_config_file_name + ".j2"
+logsearch_config_file_exists = os.path.isfile(logsearch_config_file_path)
+
+# default hadoop params
+hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
+
+mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+
+versioned_stack_root = '/usr/hdp/current'
+
+#security params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+#java params
+java_home = config['hostLevelParams']['java_home']
+
+#hadoop params
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
+
+jsvc_path = "/usr/lib/bigtop-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
+namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize =  "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+#users and groups
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+user_group = config['configurations']['cluster-env']['user_group']
+
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+has_namenode = not len(namenode_host) == 0
+
+if has_namenode or dfs_type == 'HCFS':
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+
+link_configs_lock_file = get_config_lock_file()
+stack_select_lock_file = os.path.join(tmp_dir, "stack_select_lock_file")
+
+upgrade_suspended = default("/roleParams/upgrade_suspended", False)

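params.py leans heavily on default(path, fallback) from resource_management. A minimal sketch of the semantics assumed here (not the library's implementation): the '/'-separated path is resolved against the command JSON, and the fallback is returned when any segment is missing.

    def default(path, fallback, config):
        node = config
        for key in path.strip("/").split("/"):
            if not isinstance(node, dict) or key not in node:
                return fallback
            node = node[key]
        return node

    cmd = {"commandParams": {"dfs_type": "HCFS"}}
    assert default("/commandParams/dfs_type", "", cmd) == "HCFS"
    assert default("/roleParams/upgrade_suspended", False, cmd) is False
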
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
new file mode 100644
index 0000000..67c3ba8
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/shared_initialization.py
@@ -0,0 +1,132 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import os
+
+import ambari_simplejson as json
+from ambari_jinja2 import Environment as JinjaEnvironment
+from resource_management.core.logger import Logger
+from resource_management.core.resources.system import Directory, File
+from resource_management.core.source import InlineTemplate, Template
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.fcntl_based_process_lock import FcntlBasedProcessLock
+from resource_management.libraries.resources.xml_config import XmlConfig
+from resource_management.libraries.script import Script
+
+
+def setup_stack_symlinks(struct_out_file):
+  """
+  Invokes <stack-selector-tool> set all against a calculated fully-qualified, "normalized" version based on a
+  stack version, such as "2.3". This should always be called after a component has been
+  installed to ensure that all HDP pointers are correct. The stack upgrade logic does not
+  interact with this since it's done via a custom command and will not trigger this hook.
+  :return:
+  """
+  import params
+  if params.upgrade_suspended:
+    Logger.warning("Skipping running stack-selector-tool because there is a suspended upgrade")
+    return
+
+  if params.host_sys_prepped:
+    Logger.warning("Skipping running stack-selector-tool because this is a sys_prepped host. This may cause symlink pointers not to be created for HDP components installed later on top of an already sys_prepped host")
+    return
+
+  # get the packages which the stack-select tool should be used on
+  stack_packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
+  if stack_packages is None:
+    return
+
+  json_version = load_version(struct_out_file)
+
+  if not json_version:
+    Logger.info("There is no advertised version for this component stored in {0}".format(struct_out_file))
+    return
+
+  # On parallel command execution this should be executed by a single process at a time.
+  with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
+    for package in stack_packages:
+      stack_select.select(package, json_version)
+
+
+def setup_config():
+  import params
+  stackversion = params.stack_version_unformatted
+  Logger.info("FS Type: {0}".format(params.dfs_type))
+
+  is_hadoop_conf_dir_present = False
+  if hasattr(params, "hadoop_conf_dir") and params.hadoop_conf_dir is not None and os.path.exists(params.hadoop_conf_dir):
+    is_hadoop_conf_dir_present = True
+  else:
+    Logger.warning("Parameter hadoop_conf_dir is missing or directory does not exist. This is expected if this host does not have any Hadoop components.")
+
+  if is_hadoop_conf_dir_present and (params.has_namenode or stackversion.find('Gluster') >= 0 or params.dfs_type == 'HCFS'):
+    # create core-site only if the hadoop config directory exists
+    XmlConfig("core-site.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['core-site'],
+              configuration_attributes=params.config['configuration_attributes']['core-site'],
+              owner=params.hdfs_user,
+              group=params.user_group,
+              only_if=format("ls {hadoop_conf_dir}"))
+
+  Directory(params.logsearch_logfeeder_conf,
+            mode=0755,
+            cd_access='a',
+            create_parents=True
+            )
+
+  if params.logsearch_config_file_exists:
+    File(format("{logsearch_logfeeder_conf}/" + params.logsearch_config_file_name),
+         content=Template(params.logsearch_config_file_path,extra_imports=[default])
+         )
+  else:
+    Logger.warning('No logsearch configuration exists at ' + params.logsearch_config_file_path)
+
+
+def load_version(struct_out_file):
+  """
+  Load version from file.  Made a separate method for testing
+  """
+  try:
+    with open(struct_out_file, 'r') as fp:
+      json_info = json.load(fp)
+
+    return json_info['version']
+  except (IOError, KeyError, TypeError):
+    return None
+
+
+def link_configs(struct_out_file):
+  """
+  Links configs, only on a fresh install of HDP-2.3 and higher
+  """
+  import params
+
+  json_version = load_version(struct_out_file)
+
+  if not json_version:
+    Logger.info("Could not load 'version' from {0}".format(struct_out_file))
+    return
+
+  # On parallel command execution this should be executed by a single process at a time.
+  with FcntlBasedProcessLock(params.link_configs_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
+    for k, v in conf_select.get_package_dirs().iteritems():
+      conf_select.convert_conf_directories_to_symlinks(k, json_version, v)
\ No newline at end of file

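setup_stack_symlinks and link_configs serialize parallel command execution through FcntlBasedProcessLock. A minimal standalone sketch of the same technique, an advisory flock(2) on a lock file (this is not the resource_management implementation):

    import fcntl

    class FileLock(object):
        """Advisory inter-process lock backed by flock(2)."""

        def __init__(self, lock_file_path):
            self.lock_file_path = lock_file_path
            self.lock_file = None

        def __enter__(self):
            self.lock_file = open(self.lock_file_path, "a")
            fcntl.flock(self.lock_file, fcntl.LOCK_EX)  # blocks until acquired
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            fcntl.flock(self.lock_file, fcntl.LOCK_UN)
            self.lock_file.close()

    # usage, mirroring the pattern in the hook:
    # with FileLock("/tmp/stack_select_lock_file"):
    #     ...  # run stack_select.select for each package
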
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/before-ANY/files/changeToSecureUid.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/before-ANY/files/changeToSecureUid.sh b/ambari-server/src/main/resources/stack-hooks/before-ANY/files/changeToSecureUid.sh
new file mode 100644
index 0000000..a6b8b77
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/before-ANY/files/changeToSecureUid.sh
@@ -0,0 +1,64 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+username=$1
+directories=$2
+newUid=$3
+
+function find_available_uid() {
+ for ((i=1001; i<=2000; i++))
+ do
+   grep -q $i /etc/passwd
+   if [ "$?" -ne 0 ]
+   then
+    newUid=$i
+    break
+   fi
+ done
+}
+
+if [ -z $2 ]; then
+  test $(id -u ${username} 2>/dev/null)
+  if [ $? -ne 1 ]; then
+   newUid=`id -u ${username}`
+  else
+   find_available_uid
+  fi
+  echo $newUid
+  exit 0
+else
+  find_available_uid
+fi
+
+if [ $newUid -eq 0 ]
+then
+  echo "Failed to find Uid between 1000 and 2000"
+  exit 1
+fi
+
+set -e
+dir_array=($(echo $directories | sed 's/,/\n/g'))
+old_uid=$(id -u $username)
+sudo_prefix="/var/lib/ambari-agent/ambari-sudo.sh -H -E"
+echo "Changing uid of $username from $old_uid to $newUid"
+echo "Changing directory permisions for ${dir_array[@]}"
+$sudo_prefix usermod -u $newUid $username && for dir in ${dir_array[@]} ; do ls $dir 2> /dev/null && echo "Changing permission for $dir" && $sudo_prefix chown -Rh $newUid $dir ; done
+exit 0

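For clarity, a Python rendering of the shell's find_available_uid. Note the shell greps the candidate UID anywhere in the passwd line; this hypothetical variant is slightly stricter and checks only the UID field:

    def find_available_uid(passwd_path="/etc/passwd", low=1001, high=2000):
        used = set()
        with open(passwd_path) as passwd:
            for line in passwd:
                fields = line.split(":")
                if len(fields) > 2 and fields[2].isdigit():
                    used.add(int(fields[2]))
        for uid in range(low, high + 1):
            if uid not in used:
                return uid
        return 0  # mirrors the script: 0 means no free uid was found
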
http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/hook.py b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/hook.py
new file mode 100644
index 0000000..c34be0b
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/hook.py
@@ -0,0 +1,36 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from shared_initialization import *
+
+class BeforeAnyHook(Hook):
+
+  def hook(self, env):
+    import params
+    env.set_params(params)
+
+    setup_users()
+    if params.has_namenode or params.dfs_type == 'HCFS':
+      setup_hadoop_env()
+    setup_java()
+
+if __name__ == "__main__":
+  BeforeAnyHook().execute()
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
new file mode 100644
index 0000000..20992e2
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
@@ -0,0 +1,254 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import collections
+import re
+import os
+import ast
+
+import ambari_simplejson as json # simplejson is much faster than the Python 2.6 json module and has the same function set.
+
+from resource_management.libraries.script import Script
+from resource_management.libraries.functions import default
+from resource_management.libraries.functions import format
+from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions import stack_select
+from resource_management.libraries.functions import format_jvm_option
+from resource_management.libraries.functions.is_empty import is_empty
+from resource_management.libraries.functions.version import format_stack_version
+from resource_management.libraries.functions.expect import expect
+from resource_management.libraries.functions import StackFeature
+from resource_management.libraries.functions.stack_features import check_stack_feature
+from resource_management.libraries.functions.stack_features import get_stack_feature_version
+from resource_management.libraries.functions.get_architecture import get_architecture
+from ambari_commons.constants import AMBARI_SUDO_BINARY
+
+
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+stack_root = Script.get_stack_root()
+
+architecture = get_architecture()
+
+dfs_type = default("/commandParams/dfs_type", "")
+
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
+jdk_name = default("/hostLevelParams/jdk_name", None)
+java_home = config['hostLevelParams']['java_home']
+java_version = expect("/hostLevelParams/java_version", int)
+jdk_location = config['hostLevelParams']['jdk_location']
+
+hadoop_custom_extensions_enabled = default("/configurations/core-site/hadoop.custom-extensions.enabled", False)
+
+sudo = AMBARI_SUDO_BINARY
+
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+
+stack_version_unformatted = config['hostLevelParams']['stack_version']
+stack_version_formatted = format_stack_version(stack_version_unformatted)
+
+upgrade_type = Script.get_upgrade_type(default("/commandParams/upgrade_type", ""))
+version = default("/commandParams/version", None)
+# Handle upgrade and downgrade
+if (upgrade_type is not None) and version:
+  stack_version_formatted = format_stack_version(version)
+ambari_java_home = default("/commandParams/ambari_java_home", None)
+ambari_jdk_name = default("/commandParams/ambari_jdk_name", None)
+
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+
+# Some datanode settings
+dfs_dn_addr = default('/configurations/hdfs-site/dfs.datanode.address', None)
+dfs_dn_http_addr = default('/configurations/hdfs-site/dfs.datanode.http.address', None)
+dfs_dn_https_addr = default('/configurations/hdfs-site/dfs.datanode.https.address', None)
+dfs_http_policy = default('/configurations/hdfs-site/dfs.http.policy', None)
+secure_dn_ports_are_in_use = False
+
+def get_port(address):
+  """
+  Extracts port from the address like 0.0.0.0:1019
+  """
+  if address is None:
+    return None
+  m = re.search(r'(?:http(?:s)?://)?([\w\d.]*):(\d{1,5})', address)
+  if m is not None:
+    return int(m.group(2))
+  else:
+    return None
+
+def is_secure_port(port):
+  """
+  Returns True if port is root-owned at *nix systems
+  """
+  if port is not None:
+    return port < 1024
+  else:
+    return False
+
+# upgrades would cause these directories to have a version instead of "current"
+# which would cause a lot of problems when writing out hadoop-env.sh; instead
+# force the use of "current" in the hook
+hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
+hadoop_home = stack_select.get_hadoop_dir("home")
+hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")
+hadoop_lib_home = stack_select.get_hadoop_dir("lib")
+
+hadoop_dir = "/etc/hadoop"
+hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
+datanode_max_locked_memory = config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory']
+is_datanode_max_locked_memory_set = not is_empty(config['configurations']['hdfs-site']['dfs.datanode.max.locked.memory'])
+
+mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
+
+if not security_enabled:
+  hadoop_secure_dn_user = '""'
+else:
+  dfs_dn_port = get_port(dfs_dn_addr)
+  dfs_dn_http_port = get_port(dfs_dn_http_addr)
+  dfs_dn_https_port = get_port(dfs_dn_https_addr)
+  # Avoid the datanode failing to start as a plain user because it binds root-owned ports
+  if dfs_http_policy == "HTTPS_ONLY":
+    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_https_port)
+  elif dfs_http_policy == "HTTP_AND_HTTPS":
+    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port) or is_secure_port(dfs_dn_https_port)
+  else:   # params.dfs_http_policy == "HTTP_ONLY" or not defined:
+    secure_dn_ports_are_in_use = is_secure_port(dfs_dn_port) or is_secure_port(dfs_dn_http_port)
+  if secure_dn_ports_are_in_use:
+    hadoop_secure_dn_user = hdfs_user
+  else:
+    hadoop_secure_dn_user = '""'
+
+#hadoop params
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
+
+jsvc_path = "/usr/lib/bigtop-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize = config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize = config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+namenode_opt_permsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_permsize","128m")
+namenode_opt_maxpermsize = format_jvm_option("/configurations/hadoop-env/namenode_opt_maxpermsize","256m")
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize =  "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+nfsgateway_heapsize = config['configurations']['hadoop-env']['nfsgateway_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+
+#users and groups
+hbase_user = config['configurations']['hbase-env']['hbase_user']
+smoke_user = config['configurations']['cluster-env']['smokeuser']
+gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
+gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+tez_user = config['configurations']['tez-env']["tez_user"]
+oozie_user = config['configurations']['oozie-env']["oozie_user"]
+falcon_user = config['configurations']['falcon-env']["falcon_user"]
+ranger_user = config['configurations']['ranger-env']["ranger_user"]
+zeppelin_user = config['configurations']['zeppelin-env']["zeppelin_user"]
+zeppelin_group = config['configurations']['zeppelin-env']["zeppelin_group"]
+
+user_group = config['configurations']['cluster-env']['user_group']
+
+ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+oozie_servers = default("/clusterHostInfo/oozie_server", [])
+falcon_server_hosts = default("/clusterHostInfo/falcon_server_hosts", [])
+ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
+zeppelin_master_hosts = default("/clusterHostInfo/zeppelin_master_hosts", [])
+
+# get the correct version to use for checking stack features
+version_for_stack_feature_checks = get_stack_feature_version(config)
+
+
+has_namenode = not len(namenode_host) == 0
+has_ganglia_server = not len(ganglia_server_hosts) == 0
+has_tez = 'tez-site' in config['configurations']
+has_hbase_masters = not len(hbase_master_hosts) == 0
+has_oozie_server = not len(oozie_servers) == 0
+has_falcon_server_hosts = not len(falcon_server_hosts) == 0
+has_ranger_admin = not len(ranger_admin_hosts) == 0
+has_zeppelin_master = not len(zeppelin_master_hosts) == 0
+stack_supports_zk_security = check_stack_feature(StackFeature.SECURE_ZOOKEEPER, version_for_stack_feature_checks)
+
+# HDFS High Availability properties
+dfs_ha_enabled = False
+dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.internal.nameservices', None)
+if dfs_ha_nameservices is None:
+  dfs_ha_nameservices = default('/configurations/hdfs-site/dfs.nameservices', None)
+dfs_ha_namenode_ids = default(format("/configurations/hdfs-site/dfs.ha.namenodes.{dfs_ha_nameservices}"), None)
+if dfs_ha_namenode_ids:
+  dfs_ha_namenode_ids_list = dfs_ha_namenode_ids.split(",")
+  dfs_ha_namenode_ids_array_len = len(dfs_ha_namenode_ids_list)
+  if dfs_ha_namenode_ids_array_len > 1:
+    dfs_ha_enabled = True
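+
+# Example: with a hypothetical dfs.internal.nameservices=mycluster and
+# dfs.ha.namenodes.mycluster=nn1,nn2, the id list has two entries and
+# dfs_ha_enabled becomes True.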
+
+if has_namenode or dfs_type == 'HCFS':
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+  hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
+
+hbase_tmp_dir = "/tmp/hbase-hbase"
+
+proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
+ranger_group = config['configurations']['ranger-env']['ranger_group']
+dfs_cluster_administrators_group = config['configurations']['hdfs-site']["dfs.cluster.administrators"]
+
+sysprep_skip_create_users_and_groups = default("/configurations/cluster-env/sysprep_skip_create_users_and_groups", False)
+ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
+fetch_nonlocal_groups = config['configurations']['cluster-env']["fetch_nonlocal_groups"]
+
+smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+if has_hbase_masters:
+  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
+#repo params
+repo_info = config['hostLevelParams']['repo_info']
+service_repo_info = default("/hostLevelParams/service_repo_info",None)
+
+user_to_groups_dict = {}
+
+# Append new user-group mappings to the dict
+try:
+  user_group_map = ast.literal_eval(config['hostLevelParams']['user_groups'])
+  for key in user_group_map.iterkeys():
+    user_to_groups_dict[key] = user_group_map[key]
+except ValueError:
+  print('User Group mapping (user_groups) is missing or malformed in the hostLevelParams')
+
+user_to_gid_dict = collections.defaultdict(lambda:user_group)
+
+user_list = json.loads(config['hostLevelParams']['user_list'])
+group_list = json.loads(config['hostLevelParams']['group_list'])
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+tez_am_view_acls = config['configurations']['tez-site']["tez.am.view-acls"]
+override_uid = str(default("/configurations/cluster-env/override_uid", "true")).lower()
+
+# if NameNode HA on a secure cluster, access ZooKeeper securely
+if stack_supports_zk_security and dfs_ha_enabled and security_enabled:
+  hadoop_zkfc_opts = format("-Dzookeeper.sasl.client=true -Dzookeeper.sasl.client.username=zookeeper -Djava.security.auth.login.config={hadoop_conf_secure_dir}/hdfs_jaas.conf -Dzookeeper.sasl.clientconfig=Client")

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/shared_initialization.py b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/shared_initialization.py
new file mode 100644
index 0000000..27679e0
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/shared_initialization.py
@@ -0,0 +1,273 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+import re
+import getpass
+import tempfile
+from copy import copy
+from resource_management.libraries.functions.version import compare_versions
+from resource_management import *
+from resource_management.core import shell
+
+def setup_users():
+  """
+  Creates users before cluster installation
+  """
+  import params
+
+  should_create_users_and_groups = False
+  if params.host_sys_prepped:
+    should_create_users_and_groups = not params.sysprep_skip_create_users_and_groups
+  else:
+    should_create_users_and_groups = not params.ignore_groupsusers_create
+
+  if should_create_users_and_groups:
+    for group in params.group_list:
+      Group(group,
+      )
+
+    for user in params.user_list:
+      User(user,
+           uid = get_uid(user) if params.override_uid == "true" else None,
+           gid = params.user_to_gid_dict[user],
+           groups = params.user_to_groups_dict[user],
+           fetch_nonlocal_groups = params.fetch_nonlocal_groups,
+           )
+
+    if params.override_uid == "true":
+      set_uid(params.smoke_user, params.smoke_user_dirs)
+    else:
+      Logger.info('Skipping setting uid for smoke user as override_uid is not enabled')
+  else:
+    Logger.info('Skipping creation of User and Group as host is sys prepped or ignore_groupsusers_create flag is on')
+
+
+  if params.has_hbase_masters:
+    Directory(params.hbase_tmp_dir,
+               owner = params.hbase_user,
+               mode=0775,
+               create_parents = True,
+               cd_access="a",
+    )
+
+    if params.override_uid == "true":
+      set_uid(params.hbase_user, params.hbase_user_dirs)
+    else:
+      Logger.info('Skipping setting uid for hbase user as override_uid is not enabled')
+
+  if should_create_users_and_groups:
+    if params.has_namenode:
+      create_dfs_cluster_admins()
+    if params.has_tez and params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.3') >= 0:
+      create_tez_am_view_acls()
+  else:
+    Logger.info('Skipping setting dfs cluster admin and tez view acls as host is sys prepped or ignore_groupsusers_create flag is on')
+
+def create_dfs_cluster_admins():
+  """
+  dfs.cluster.administrators supports the format: <comma-delimited list of usernames><space><comma-delimited list of group names>
+  """
+  import params
+
+  groups_list = create_users_and_groups(params.dfs_cluster_administrators_group)
+
+  User(params.hdfs_user,
+    groups = params.user_to_groups_dict[params.hdfs_user] + groups_list,
+    fetch_nonlocal_groups = params.fetch_nonlocal_groups
+  )
+
+def create_tez_am_view_acls():
+  """
+  tez.am.view-acls supports the format: <comma-delimited list of usernames><space><comma-delimited list of group names>
+  """
+  import params
+
+  if not params.tez_am_view_acls.startswith("*"):
+    create_users_and_groups(params.tez_am_view_acls)
+
+def create_users_and_groups(user_and_groups):
+  """
+  Creates the users and groups parsed from user_and_groups and returns the list of group names.
+  """
+  import params
+
+  parts = re.split(r'\s+', user_and_groups)
+  if len(parts) == 1:
+    parts.append("")
+
+  users_list = parts[0].strip(",").split(",") if parts[0] else []
+  groups_list = parts[1].strip(",").split(",") if parts[1] else []
+
+  # skip creating groups and users if * is provided as value.
+  users_list = filter(lambda x: x != '*', users_list)
+  groups_list = filter(lambda x: x != '*', groups_list)
+
+  if users_list:
+    User(users_list,
+          fetch_nonlocal_groups = params.fetch_nonlocal_groups
+    )
+
+  if groups_list:
+    Group(copy(groups_list),
+    )
+  return groups_list
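+
+# A sketch of the parsing above for a hypothetical value "hdfs,yarn hadoop,users":
+# users ['hdfs', 'yarn'] and groups ['hadoop', 'users'] are created and the group
+# list is returned; any '*' entry is filtered out and never created.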
+
+def set_uid(user, user_dirs):
+  """
+  user_dirs - comma separated directories
+  """
+  import params
+
+  File(format("{tmp_dir}/changeUid.sh"),
+       content=StaticFile("changeToSecureUid.sh"),
+       mode=0555)
+  ignore_groupsusers_create_str = str(params.ignore_groupsusers_create).lower()
+  uid = get_uid(user, return_existing=True)
+  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} {new_uid}", new_uid=0 if uid is None else uid),
+          not_if = format("(test $(id -u {user}) -gt 1000) || ({ignore_groupsusers_create_str})"))
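+
+# The not_if guard above skips the UID change when the user already has a UID
+# greater than 1000 or when ignore_groupsusers_create is set, e.g. a hypothetical
+# existing hdfs user with UID 1013 is left untouched.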
+
+def get_uid(user, return_existing=False):
+  """
+  Tries to resolve the UID for a username. It first looks for a "<user>_uid" property in the
+  *-env configurations and, if *return_existing=True*, falls back to the UID of the existing *user*.
+
+  :param user: username to get the UID for
+  :param return_existing: if True, return the UID of the existing user when no property overrides it
+  :return: the UID, or None to let the OS choose one
+  """
+  import params
+  user_str = str(user) + "_uid"
+  service_env = [ serviceEnv for serviceEnv in params.config['configurations'] if user_str in params.config['configurations'][serviceEnv]]
+
+  if service_env and params.config['configurations'][service_env[0]][user_str]:
+    service_env_str = str(service_env[0])
+    uid = params.config['configurations'][service_env_str][user_str]
+    if len(service_env) > 1:
+      Logger.warning("Multiple values found for %s, using %s"  % (user_str, uid))
+    return uid
+  else:
+    if return_existing:
+      # pick up existing UID or try to find available UID in /etc/passwd, see changeToSecureUid.sh for more info
+      if user == params.smoke_user:
+        return None
+      File(format("{tmp_dir}/changeUid.sh"),
+           content=StaticFile("changeToSecureUid.sh"),
+           mode=0555)
+      code, newUid = shell.call(format("{tmp_dir}/changeUid.sh {user}"))
+      return int(newUid)
+    else:
+      # do not return UID for existing user, used in User resource call to let OS to choose UID for us
+      return None
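+
+# For instance, a hypothetical "hdfs_uid" property in hadoop-env would be returned
+# directly by get_uid("hdfs"); absent such a property, return_existing=True falls
+# back to the UID reported by changeToSecureUid.sh.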
+
+def setup_hadoop_env():
+  import params
+  stackversion = params.stack_version_unformatted
+  Logger.info("FS Type: {0}".format(params.dfs_type))
+  if params.has_namenode or stackversion.find('Gluster') >= 0 or params.dfs_type == 'HCFS':
+    if params.security_enabled:
+      tc_owner = "root"
+    else:
+      tc_owner = params.hdfs_user
+
+    # create /etc/hadoop
+    Directory(params.hadoop_dir, mode=0755)
+
+    # write out hadoop-env.sh, but only if the directory exists
+    if os.path.exists(params.hadoop_conf_dir):
+      File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'), owner=tc_owner,
+        group=params.user_group,
+        content=InlineTemplate(params.hadoop_env_sh_template))
+
+    # Create tmp dir for java.io.tmpdir
+    # Handle a situation when /tmp is set to noexec
+    Directory(params.hadoop_java_io_tmpdir,
+              owner=params.hdfs_user,
+              group=params.user_group,
+              mode=01777
+    )
+
+def setup_java():
+  """
+  Installs the JDK described by the stack params.
+  Also installs the Ambari JDK when it differs from the stack JDK.
+  """
+  import params
+  __setup_java(custom_java_home=params.java_home, custom_jdk_name=params.jdk_name)
+  if params.ambari_java_home and params.ambari_java_home != params.java_home:
+    __setup_java(custom_java_home=params.ambari_java_home, custom_jdk_name=params.ambari_jdk_name)
+
+def __setup_java(custom_java_home, custom_jdk_name):
+  """
+  Installs a JDK using specific params that come from ambari-server
+  """
+  import params
+  java_exec = format("{custom_java_home}/bin/java")
+
+  if not os.path.isfile(java_exec):
+    if not custom_jdk_name: # no JDK name means the JDK should already be installed on the host
+      raise Fail(format("Unable to access {java_exec}. Confirm you have copied jdk to this host."))
+
+    jdk_curl_target = format("{tmp_dir}/{custom_jdk_name}")
+    java_dir = os.path.dirname(custom_java_home)
+
+    Directory(params.artifact_dir,
+              create_parents = True,
+              )
+
+    File(jdk_curl_target,
+         content = DownloadSource(format("{jdk_location}/{custom_jdk_name}")),
+         not_if = format("test -f {jdk_curl_target}")
+         )
+
+    File(jdk_curl_target,
+         mode = 0755,
+         )
+
+    tmp_java_dir = tempfile.mkdtemp(prefix="jdk_tmp_", dir=params.tmp_dir)
+
+    try:
+      if params.jdk_name.endswith(".bin"):
+        chmod_cmd = ("chmod", "+x", jdk_curl_target)
+        install_cmd = format("cd {tmp_java_dir} && echo A | {jdk_curl_target} -noregister && {sudo} cp -rp {tmp_java_dir}/* {java_dir}")
+      elif params.jdk_name.endswith(".gz"):
+        chmod_cmd = ("chmod","a+x", java_dir)
+        install_cmd = format("cd {tmp_java_dir} && tar -xf {jdk_curl_target} && {sudo} cp -rp {tmp_java_dir}/* {java_dir}")
+
+      Directory(java_dir)
+
+      Execute(chmod_cmd,
+              sudo = True,
+              )
+
+      Execute(install_cmd,
+              )
+
+    finally:
+      Directory(tmp_java_dir, action="delete")
+
+    File(format("{custom_java_home}/bin/java"),
+         mode=0755,
+         cd_access="a",
+         )
+    Execute(('chmod', '-R', '755', custom_java_home),
+            sudo = True,
+            )
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/hook.py
new file mode 100644
index 0000000..ce17776
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/hook.py
@@ -0,0 +1,37 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from shared_initialization import *
+from repo_initialization import *
+
+class BeforeInstallHook(Hook):
+
+  def hook(self, env):
+    import params
+
+    self.run_custom_hook('before-ANY')
+    env.set_params(params)
+
+    install_repos()
+    install_packages()
+
+if __name__ == "__main__":
+  BeforeInstallHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/params.py
new file mode 100644
index 0000000..50c5a40
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/params.py
@@ -0,0 +1,115 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from ambari_commons.constants import AMBARI_SUDO_BINARY
+from resource_management.libraries.functions.version import format_stack_version, compare_versions
+from resource_management.core.system import System
+from resource_management.libraries.script.script import Script
+from resource_management.libraries.functions import default, format
+from resource_management.libraries.functions.expect import expect
+
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+sudo = AMBARI_SUDO_BINARY
+
+stack_version_unformatted = config['hostLevelParams']['stack_version']
+agent_stack_retry_on_unavailability = config['hostLevelParams']['agent_stack_retry_on_unavailability']
+agent_stack_retry_count = expect("/hostLevelParams/agent_stack_retry_count", int)
+stack_version_formatted = format_stack_version(stack_version_unformatted)
+
+#users and groups
+hbase_user = config['configurations']['hbase-env']['hbase_user']
+smoke_user = config['configurations']['cluster-env']['smokeuser']
+gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
+gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+tez_user = config['configurations']['tez-env']["tez_user"]
+
+user_group = config['configurations']['cluster-env']['user_group']
+proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
+
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+
+# repo templates
+repo_rhel_suse = config['configurations']['cluster-env']['repo_suse_rhel_template']
+repo_ubuntu = config['configurations']['cluster-env']['repo_ubuntu_template']
+
+#hosts
+hostname = config["hostname"]
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+rm_host = default("/clusterHostInfo/rm_host", [])
+slave_hosts = default("/clusterHostInfo/slave_hosts", [])
+oozie_servers = default("/clusterHostInfo/oozie_server", [])
+hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", [])
+hive_server_host = default("/clusterHostInfo/hive_server_host", [])
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+hs_host = default("/clusterHostInfo/hs_host", [])
+jtnode_host = default("/clusterHostInfo/jtnode_host", [])
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
+ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
+storm_server_hosts = default("/clusterHostInfo/nimbus_hosts", [])
+falcon_host = default('/clusterHostInfo/falcon_server_hosts', [])
+
+has_sqoop_client = 'sqoop-env' in config['configurations']
+has_namenode = not len(namenode_host) == 0
+has_hs = not len(hs_host) == 0
+has_resourcemanager = not len(rm_host) == 0
+has_slaves = not len(slave_hosts) == 0
+has_oozie_server = not len(oozie_servers) == 0
+has_hcat_server_host = not len(hcat_server_hosts) == 0
+has_hive_server_host = not len(hive_server_host) == 0
+has_hbase_masters = not len(hbase_master_hosts) == 0
+has_zk_host = not len(zk_hosts) == 0
+has_ganglia_server = not len(ganglia_server_hosts) == 0
+has_storm_server = not len(storm_server_hosts) == 0
+has_falcon_server = not len(falcon_host) == 0
+has_tez = 'tez-site' in config['configurations']
+
+is_namenode_master = hostname in namenode_host
+is_jtnode_master = hostname in jtnode_host
+is_rmnode_master = hostname in rm_host
+is_hsnode_master = hostname in hs_host
+is_hbase_master = hostname in hbase_master_hosts
+is_slave = hostname in slave_hosts
+if has_ganglia_server:
+  ganglia_server_host = ganglia_server_hosts[0]
+
+hbase_tmp_dir = "/tmp/hbase-hbase"
+
+#security params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+#java params
+java_home = config['hostLevelParams']['java_home']
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
+jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
+jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
+jce_location = config['hostLevelParams']['jdk_location']
+jdk_location = config['hostLevelParams']['jdk_location']
+ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
+host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)
+
+smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+if has_hbase_masters:
+  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
+#repo params
+repo_info = config['hostLevelParams']['repo_info']
+service_repo_info = default("/hostLevelParams/service_repo_info",None)
+
+repo_file = default("/repositoryFile", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/repo_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/repo_initialization.py b/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/repo_initialization.py
new file mode 100644
index 0000000..9f2b344
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/repo_initialization.py
@@ -0,0 +1,75 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from ambari_commons.os_check import OSCheck
+from resource_management.libraries.resources.repository import Repository
+from resource_management.libraries.functions.repository_util import create_repo_files, CommandRepository, UBUNTU_REPO_COMPONENTS_POSTFIX
+from resource_management.core.logger import Logger
+import ambari_simplejson as json
+
+
+def _alter_repo(action, repo_string, repo_template):
+  """
+  @param action: "delete" or "create"
+  @param repo_string: e.g. "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]"
+  """
+  repo_dicts = json.loads(repo_string)
+
+  if not isinstance(repo_dicts, list):
+    repo_dicts = [repo_dicts]
+
+  if not repo_dicts:
+    Logger.info("Repository list is empty. Ambari may not be managing the repositories.")
+  else:
+    Logger.info("Initializing {0} repositories".format(len(repo_dicts)))
+
+  for repo in repo_dicts:
+    if 'baseUrl' not in repo:
+      repo['baseUrl'] = None
+    if 'mirrorsList' not in repo:
+      repo['mirrorsList'] = None
+
+    ubuntu_components = [ repo['distribution'] if 'distribution' in repo and repo['distribution'] else repo['repoName'] ] \
+                        + [repo['components'].replace(",", " ") if 'components' in repo and repo['components'] else UBUNTU_REPO_COMPONENTS_POSTFIX]
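+    # For a hypothetical repo {'repoName': 'HDP', 'components': 'main,contrib'} this
+    # yields ['HDP', 'main contrib']; the components only matter for Ubuntu repo files.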
+
+    Repository(repo['repoId'],
+               action = action,
+               base_url = repo['baseUrl'],
+               mirror_list = repo['mirrorsList'],
+               repo_file_name = repo['repoName'],
+               repo_template = repo_template,
+               components = ubuntu_components) # ubuntu specific
+
+
+def install_repos():
+  import params
+  if params.host_sys_prepped:
+    return
+
+  template = params.repo_rhel_suse if OSCheck.is_suse_family() or OSCheck.is_redhat_family() else params.repo_ubuntu
+
+  # use this newer way of specifying repositories, if available
+  if params.repo_file is not None:
+    create_repo_files(template, CommandRepository(params.repo_file))
+    return
+
+  _alter_repo("create", params.repo_info, template)
+
+  if params.service_repo_info:
+    _alter_repo("create", params.service_repo_info, template)

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/shared_initialization.py
new file mode 100644
index 0000000..1609050
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/before-INSTALL/scripts/shared_initialization.py
@@ -0,0 +1,37 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+
+from resource_management.libraries.functions import stack_tools
+from resource_management.libraries.functions.version import compare_versions
+from resource_management.core.resources.packaging import Package
+
+def install_packages():
+  import params
+  if params.host_sys_prepped:
+    return
+
+  packages = ['unzip', 'curl']
+  if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0:
+    stack_selector_package = stack_tools.get_stack_tool_package(stack_tools.STACK_SELECTOR_NAME)
+    packages.append(stack_selector_package)
+  Package(packages,
+          retry_on_repo_unavailability=params.agent_stack_retry_on_unavailability,
+          retry_count=params.agent_stack_retry_count)
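+
+# On a stack at version 2.2 or newer this installs, for example, something like
+# ['unzip', 'curl', 'hdp-select'] (the selector package name comes from stack_tools);
+# on older stacks only the base utilities are installed.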

http://git-wip-us.apache.org/repos/asf/ambari/blob/5b36cdfd/ambari-server/src/main/resources/stack-hooks/before-RESTART/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stack-hooks/before-RESTART/scripts/hook.py b/ambari-server/src/main/resources/stack-hooks/before-RESTART/scripts/hook.py
new file mode 100644
index 0000000..14b9d99
--- /dev/null
+++ b/ambari-server/src/main/resources/stack-hooks/before-RESTART/scripts/hook.py
@@ -0,0 +1,29 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+class BeforeRestartHook(Hook):
+
+  def hook(self, env):
+    self.run_custom_hook('before-START')
+
+if __name__ == "__main__":
+  BeforeRestartHook().execute()
+