Posted to commits@ambari.apache.org by ja...@apache.org on 2014/11/25 00:45:43 UTC

[24/24] ambari git commit: AMBARI-7872 Create stack definitions for PHD-3.0.0.0 (vasanm, adenisso, tyu, Boxiong Ding, rpidva, rmeneses, Sourabh Bansod, Ashvin Agrawal, Sujeet Varakhedi via jaoki)

AMBARI-7872 Create stack definitions for PHD-3.0.0.0 (vasanm, adenisso, tyu, Boxiong Ding, rpidva, rmeneses, Sourabh Bansod, Ashvin Agrawal, Sujeet Varakhedi via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e7d07030
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e7d07030
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e7d07030

Branch: refs/heads/trunk
Commit: e7d0703033f5b799e9237c1cb369b1b8557182af
Parents: bd32ef3
Author: Jun Aoki <ja...@apache.org>
Authored: Mon Nov 24 15:43:40 2014 -0800
Committer: Jun Aoki <ja...@apache.org>
Committed: Mon Nov 24 15:43:40 2014 -0800

----------------------------------------------------------------------
 ambari-server/pom.xml                           |     2 +
 .../3.0.0.0/blueprints/multinode-default.json   |   183 +
 .../3.0.0.0/blueprints/singlenode-default.json  |   137 +
 .../PHD/3.0.0.0/configuration/cluster-env.xml   |    56 +
 .../3.0.0.0/hooks/after-INSTALL/scripts/hook.py |    35 +
 .../hooks/after-INSTALL/scripts/params.py       |    73 +
 .../scripts/shared_initialization.py            |    38 +
 .../hooks/before-ANY/files/changeToSecureUid.sh |    50 +
 .../3.0.0.0/hooks/before-ANY/scripts/hook.py    |    35 +
 .../3.0.0.0/hooks/before-ANY/scripts/params.py  |   134 +
 .../before-ANY/scripts/shared_initialization.py |   114 +
 .../hooks/before-INSTALL/scripts/hook.py        |    38 +
 .../hooks/before-INSTALL/scripts/params.py      |   122 +
 .../scripts/repo_initialization.py              |    57 +
 .../scripts/shared_initialization.py            |    63 +
 .../before-INSTALL/templates/repo_suse_rhel.j2  |     7 +
 .../before-INSTALL/templates/repo_ubuntu.j2     |     1 +
 .../hooks/before-RESTART/scripts/hook.py        |    29 +
 .../hooks/before-START/files/checkForFormat.sh  |    65 +
 .../before-START/files/task-log4j.properties    |   134 +
 .../3.0.0.0/hooks/before-START/scripts/hook.py  |    37 +
 .../hooks/before-START/scripts/params.py        |   158 +
 .../scripts/shared_initialization.py            |   177 +
 .../templates/commons-logging.properties.j2     |    43 +
 .../templates/exclude_hosts_list.j2             |    21 +
 .../templates/hadoop-metrics2.properties.j2     |    65 +
 .../before-START/templates/health_check-v2.j2   |    81 +
 .../before-START/templates/health_check.j2      |   109 +
 .../templates/include_hosts_list.j2             |    21 +
 .../resources/stacks/PHD/3.0.0.0/metainfo.xml   |    22 +
 .../stacks/PHD/3.0.0.0/repos/repoinfo.xml       |    33 +
 .../stacks/PHD/3.0.0.0/role_command_order.json  |    75 +
 .../services/FLUME/configuration/flume-conf.xml |    31 +
 .../services/FLUME/configuration/flume-env.xml  |    78 +
 .../FLUME/configuration/flume-log4j.xml         |    31 +
 .../PHD/3.0.0.0/services/FLUME/metainfo.xml     |    69 +
 .../PHD/3.0.0.0/services/FLUME/metrics.json     |   716 +
 .../services/FLUME/package/scripts/flume.py     |   255 +
 .../FLUME/package/scripts/flume_check.py        |    40 +
 .../FLUME/package/scripts/flume_handler.py      |   121 +
 .../services/FLUME/package/scripts/params.py    |    70 +
 .../FLUME/package/templates/flume.conf.j2       |    24 +
 .../FLUME/package/templates/log4j.properties.j2 |    67 +
 .../GANGLIA/configuration/ganglia-env.xml       |    77 +
 .../PHD/3.0.0.0/services/GANGLIA/metainfo.xml   |   127 +
 .../GANGLIA/package/files/checkGmetad.sh        |    37 +
 .../GANGLIA/package/files/checkGmond.sh         |    62 +
 .../GANGLIA/package/files/checkRrdcached.sh     |    34 +
 .../services/GANGLIA/package/files/gmetad.init  |    73 +
 .../services/GANGLIA/package/files/gmetadLib.sh |   204 +
 .../services/GANGLIA/package/files/gmond.init   |    73 +
 .../services/GANGLIA/package/files/gmondLib.sh  |   539 +
 .../GANGLIA/package/files/rrdcachedLib.sh       |    47 +
 .../GANGLIA/package/files/setupGanglia.sh       |   141 +
 .../GANGLIA/package/files/startGmetad.sh        |    68 +
 .../GANGLIA/package/files/startGmond.sh         |    85 +
 .../GANGLIA/package/files/startRrdcached.sh     |    79 +
 .../GANGLIA/package/files/stopGmetad.sh         |    43 +
 .../services/GANGLIA/package/files/stopGmond.sh |    54 +
 .../GANGLIA/package/files/stopRrdcached.sh      |    41 +
 .../GANGLIA/package/files/teardownGanglia.sh    |    28 +
 .../GANGLIA/package/scripts/functions.py        |    31 +
 .../services/GANGLIA/package/scripts/ganglia.py |    97 +
 .../GANGLIA/package/scripts/ganglia_monitor.py  |   236 +
 .../package/scripts/ganglia_monitor_service.py  |    27 +
 .../GANGLIA/package/scripts/ganglia_server.py   |   119 +
 .../package/scripts/ganglia_server_service.py   |    27 +
 .../services/GANGLIA/package/scripts/params.py  |   160 +
 .../GANGLIA/package/scripts/status_params.py    |    25 +
 .../GANGLIA/package/templates/ganglia.conf.j2   |    34 +
 .../package/templates/gangliaClusters.conf.j2   |    43 +
 .../GANGLIA/package/templates/gangliaEnv.sh.j2  |    46 +
 .../GANGLIA/package/templates/gangliaLib.sh.j2  |    85 +
 .../GANGLIA/package/templates/rrd.py.j2         |   361 +
 .../services/HBASE/configuration/hbase-env.xml  |   133 +
 .../HBASE/configuration/hbase-log4j.xml         |   143 +
 .../HBASE/configuration/hbase-policy.xml        |    53 +
 .../services/HBASE/configuration/hbase-site.xml |   331 +
 .../PHD/3.0.0.0/services/HBASE/metainfo.xml     |   139 +
 .../PHD/3.0.0.0/services/HBASE/metrics.json     | 13635 +++++++++++++++++
 .../HBASE/package/files/draining_servers.rb     |   164 +
 .../HBASE/package/files/hbaseSmokeVerify.sh     |    34 +
 .../services/HBASE/package/scripts/__init__.py  |    19 +
 .../services/HBASE/package/scripts/functions.py |    40 +
 .../services/HBASE/package/scripts/hbase.py     |   144 +
 .../HBASE/package/scripts/hbase_client.py       |    43 +
 .../HBASE/package/scripts/hbase_decommission.py |    74 +
 .../HBASE/package/scripts/hbase_master.py       |    70 +
 .../HBASE/package/scripts/hbase_regionserver.py |    66 +
 .../HBASE/package/scripts/hbase_service.py      |    51 +
 .../services/HBASE/package/scripts/params.py    |   138 +
 .../HBASE/package/scripts/service_check.py      |    79 +
 .../HBASE/package/scripts/status_params.py      |    26 +
 ...-metrics2-hbase.properties-GANGLIA-MASTER.j2 |    81 +
 ...doop-metrics2-hbase.properties-GANGLIA-RS.j2 |    80 +
 .../HBASE/package/templates/hbase-smoke.sh.j2   |    44 +
 .../package/templates/hbase_client_jaas.conf.j2 |    23 +
 .../templates/hbase_grant_permissions.j2        |    39 +
 .../package/templates/hbase_master_jaas.conf.j2 |    26 +
 .../templates/hbase_regionserver_jaas.conf.j2   |    26 +
 .../HBASE/package/templates/regionservers.j2    |    20 +
 .../services/HDFS/configuration/core-site.xml   |   180 +
 .../services/HDFS/configuration/hadoop-env.xml  |   200 +
 .../HDFS/configuration/hadoop-policy.xml        |   134 +
 .../services/HDFS/configuration/hdfs-log4j.xml  |   201 +
 .../services/HDFS/configuration/hdfs-site.xml   |   430 +
 .../PHD/3.0.0.0/services/HDFS/metainfo.xml      |   225 +
 .../PHD/3.0.0.0/services/HDFS/metrics.json      |  7840 ++++++++++
 .../HDFS/package/files/checkForFormat.sh        |    70 +
 .../services/HDFS/package/files/checkWebUI.py   |    53 +
 .../scripts/balancer-emulator/balancer-err.log  |  1032 ++
 .../scripts/balancer-emulator/balancer.log      |    29 +
 .../scripts/balancer-emulator/hdfs-command.py   |    45 +
 .../services/HDFS/package/scripts/datanode.py   |    59 +
 .../services/HDFS/package/scripts/hdfs.py       |    80 +
 .../HDFS/package/scripts/hdfs_client.py         |    53 +
 .../HDFS/package/scripts/hdfs_datanode.py       |    56 +
 .../HDFS/package/scripts/hdfs_namenode.py       |   160 +
 .../HDFS/package/scripts/hdfs_rebalance.py      |   130 +
 .../HDFS/package/scripts/hdfs_snamenode.py      |    45 +
 .../HDFS/package/scripts/journalnode.py         |    73 +
 .../services/HDFS/package/scripts/namenode.py   |   134 +
 .../services/HDFS/package/scripts/params.py     |   235 +
 .../HDFS/package/scripts/service_check.py       |   120 +
 .../services/HDFS/package/scripts/snamenode.py  |    65 +
 .../HDFS/package/scripts/status_params.py       |    31 +
 .../services/HDFS/package/scripts/utils.py      |   149 +
 .../services/HDFS/package/scripts/zkfc_slave.py |    64 +
 .../package/templates/exclude_hosts_list.j2     |    21 +
 .../HDFS/package/templates/hdfs.conf.j2         |    35 +
 .../services/HDFS/package/templates/slaves.j2   |    21 +
 .../services/HIVE/configuration/hcat-env.xml    |    57 +
 .../services/HIVE/configuration/hive-env.xml    |   134 +
 .../HIVE/configuration/hive-exec-log4j.xml      |   111 +
 .../services/HIVE/configuration/hive-log4j.xml  |   120 +
 .../services/HIVE/configuration/hive-site.xml   |   538 +
 .../services/HIVE/configuration/webhcat-env.xml |    54 +
 .../HIVE/configuration/webhcat-site.xml         |   138 +
 .../HIVE/etc/hive-schema-0.12.0.mysql.sql       |   777 +
 .../HIVE/etc/hive-schema-0.12.0.oracle.sql      |   718 +
 .../HIVE/etc/hive-schema-0.12.0.postgres.sql    |  1406 ++
 .../PHD/3.0.0.0/services/HIVE/metainfo.xml      |   276 +
 .../services/HIVE/package/files/addMysqlUser.sh |    41 +
 .../HIVE/package/files/addPostgreSQLUser.sh     |    44 +
 .../services/HIVE/package/files/hcatSmoke.sh    |    36 +
 .../services/HIVE/package/files/hiveSmoke.sh    |    24 +
 .../services/HIVE/package/files/hiveserver2.sql |    23 +
 .../HIVE/package/files/hiveserver2Smoke.sh      |    32 +
 .../services/HIVE/package/files/pigSmoke.sh     |    18 +
 .../HIVE/package/files/startMetastore.sh        |    23 +
 .../HIVE/package/files/templetonSmoke.sh        |    96 +
 .../services/HIVE/package/scripts/__init__.py   |    19 +
 .../services/HIVE/package/scripts/hcat.py       |    58 +
 .../HIVE/package/scripts/hcat_client.py         |    43 +
 .../HIVE/package/scripts/hcat_service_check.py  |    80 +
 .../services/HIVE/package/scripts/hive.py       |   216 +
 .../HIVE/package/scripts/hive_client.py         |    42 +
 .../HIVE/package/scripts/hive_metastore.py      |    64 +
 .../HIVE/package/scripts/hive_server.py         |    66 +
 .../HIVE/package/scripts/hive_service.py        |   106 +
 .../HIVE/package/scripts/install_jars.py        |   108 +
 .../HIVE/package/scripts/mysql_server.py        |    70 +
 .../HIVE/package/scripts/mysql_service.py       |    46 +
 .../services/HIVE/package/scripts/params.py     |   283 +
 .../HIVE/package/scripts/postgresql_server.py   |   113 +
 .../HIVE/package/scripts/postgresql_service.py  |    41 +
 .../HIVE/package/scripts/service_check.py       |    46 +
 .../HIVE/package/scripts/status_params.py       |    38 +
 .../services/HIVE/package/scripts/webhcat.py    |   131 +
 .../HIVE/package/scripts/webhcat_server.py      |    53 +
 .../HIVE/package/scripts/webhcat_service.py     |    40 +
 .../package/scripts/webhcat_service_check.py    |    41 +
 .../package/templates/startHiveserver2.sh.j2    |    29 +
 .../NAGIOS/configuration/nagios-env.xml         |    53 +
 .../PHD/3.0.0.0/services/NAGIOS/metainfo.xml    |   163 +
 .../NAGIOS/package/files/check_aggregate.php    |   248 +
 .../NAGIOS/package/files/check_ambari_alerts.py |    80 +
 .../package/files/check_checkpoint_time.py      |   123 +
 .../services/NAGIOS/package/files/check_cpu.php |   109 +
 .../services/NAGIOS/package/files/check_cpu.pl  |   114 +
 .../NAGIOS/package/files/check_cpu_ha.php       |   116 +
 .../package/files/check_datanode_storage.php    |   100 +
 .../NAGIOS/package/files/check_hdfs_blocks.php  |   102 +
 .../package/files/check_hdfs_capacity.php       |   109 +
 .../files/check_hive_metastore_status.sh        |    45 +
 .../package/files/check_hive_thrift_port.py     |    72 +
 .../NAGIOS/package/files/check_hue_status.sh    |    31 +
 .../files/check_mapred_local_dir_used.sh        |    34 +
 .../package/files/check_name_dir_status.php     |    93 +
 .../NAGIOS/package/files/check_namenodes_ha.sh  |    83 +
 .../package/files/check_nodemanager_health.sh   |    45 +
 .../NAGIOS/package/files/check_oozie_status.sh  |    45 +
 .../NAGIOS/package/files/check_rpcq_latency.php |   104 +
 .../package/files/check_rpcq_latency_ha.php     |   115 +
 .../package/files/check_templeton_status.sh     |    46 +
 .../NAGIOS/package/files/check_webui.sh         |   103 +
 .../NAGIOS/package/files/check_webui_ha.sh      |    64 +
 .../package/files/hdp_mon_nagios_addons.conf    |    24 +
 .../NAGIOS/package/files/hdp_nagios_init.php    |    81 +
 .../services/NAGIOS/package/files/mm_wrapper.py |   326 +
 .../NAGIOS/package/files/nagios_alerts.php      |   513 +
 .../services/NAGIOS/package/files/sys_logger.py |   197 +
 .../NAGIOS/package/scripts/functions.py         |    47 +
 .../services/NAGIOS/package/scripts/nagios.py   |   109 +
 .../NAGIOS/package/scripts/nagios_server.py     |   111 +
 .../package/scripts/nagios_server_config.py     |    99 +
 .../NAGIOS/package/scripts/nagios_service.py    |   103 +
 .../services/NAGIOS/package/scripts/params.py   |   366 +
 .../NAGIOS/package/scripts/status_params.py     |    29 +
 .../NAGIOS/package/templates/contacts.cfg.j2    |   109 +
 .../package/templates/hadoop-commands.cfg.j2    |   166 +
 .../package/templates/hadoop-hostgroups.cfg.j2  |    33 +
 .../package/templates/hadoop-hosts.cfg.j2       |    53 +
 .../templates/hadoop-servicegroups.cfg.j2       |   113 +
 .../package/templates/hadoop-services.cfg.j2    |   791 +
 .../NAGIOS/package/templates/nagios.cfg.j2      |  1365 ++
 .../NAGIOS/package/templates/nagios.conf.j2     |    84 +
 .../services/NAGIOS/package/templates/nagios.j2 |   164 +
 .../NAGIOS/package/templates/resource.cfg.j2    |    51 +
 .../services/OOZIE/configuration/oozie-env.xml  |   129 +
 .../OOZIE/configuration/oozie-log4j.xml         |    97 +
 .../services/OOZIE/configuration/oozie-site.xml |   312 +
 .../PHD/3.0.0.0/services/OOZIE/metainfo.xml     |   153 +
 .../services/OOZIE/package/files/oozieSmoke2.sh |   112 +
 .../OOZIE/package/files/wrap_ooziedb.sh         |    31 +
 .../services/OOZIE/package/scripts/oozie.py     |   152 +
 .../OOZIE/package/scripts/oozie_client.py       |    43 +
 .../OOZIE/package/scripts/oozie_server.py       |    56 +
 .../OOZIE/package/scripts/oozie_service.py      |    74 +
 .../services/OOZIE/package/scripts/params.py    |   167 +
 .../OOZIE/package/scripts/service_check.py      |    60 +
 .../OOZIE/package/scripts/status_params.py      |    26 +
 .../package/templates/catalina.properties.j2    |    81 +
 .../package/templates/oozie-log4j.properties.j2 |    92 +
 .../services/PIG/configuration/pig-env.xml      |    38 +
 .../services/PIG/configuration/pig-log4j.xml    |    62 +
 .../PIG/configuration/pig-properties.xml        |    92 +
 .../PHD/3.0.0.0/services/PIG/metainfo.xml       |    85 +
 .../services/PIG/package/files/pigSmoke.sh      |    18 +
 .../services/PIG/package/scripts/params.py      |    57 +
 .../3.0.0.0/services/PIG/package/scripts/pig.py |    59 +
 .../services/PIG/package/scripts/pig_client.py  |    41 +
 .../PIG/package/scripts/service_check.py        |    69 +
 .../YARN/configuration-mapred/mapred-env.xml    |    65 +
 .../YARN/configuration-mapred/mapred-site.xml   |   360 +
 .../YARN/configuration/capacity-scheduler.xml   |   132 +
 .../services/YARN/configuration/yarn-env.xml    |   184 +
 .../services/YARN/configuration/yarn-log4j.xml  |    71 +
 .../services/YARN/configuration/yarn-site.xml   |   413 +
 .../PHD/3.0.0.0/services/YARN/metainfo.xml      |   249 +
 .../PHD/3.0.0.0/services/YARN/metrics.json      |  5360 +++++++
 .../files/validateYarnComponentStatus.py        |   170 +
 .../services/YARN/package/scripts/__init__.py   |    20 +
 .../scripts/application_timeline_server.py      |    57 +
 .../YARN/package/scripts/historyserver.py       |    53 +
 .../package/scripts/mapred_service_check.py     |    80 +
 .../YARN/package/scripts/mapreduce2_client.py   |    42 +
 .../YARN/package/scripts/nodemanager.py         |    59 +
 .../services/YARN/package/scripts/params.py     |   176 +
 .../YARN/package/scripts/resourcemanager.py     |   101 +
 .../services/YARN/package/scripts/service.py    |    75 +
 .../YARN/package/scripts/service_check.py       |    68 +
 .../YARN/package/scripts/status_params.py       |    36 +
 .../services/YARN/package/scripts/yarn.py       |   238 +
 .../YARN/package/scripts/yarn_client.py         |    42 +
 .../package/templates/container-executor.cfg.j2 |    40 +
 .../package/templates/exclude_hosts_list.j2     |    21 +
 .../YARN/package/templates/mapreduce.conf.j2    |    35 +
 .../package/templates/taskcontroller.cfg.j2     |    38 +
 .../YARN/package/templates/yarn.conf.j2         |    35 +
 .../ZOOKEEPER/configuration/zookeeper-env.xml   |    85 +
 .../ZOOKEEPER/configuration/zookeeper-log4j.xml |   101 +
 .../PHD/3.0.0.0/services/ZOOKEEPER/metainfo.xml |    89 +
 .../services/ZOOKEEPER/package/files/zkEnv.sh   |    96 +
 .../ZOOKEEPER/package/files/zkServer.sh         |   120 +
 .../ZOOKEEPER/package/files/zkService.sh        |    26 +
 .../services/ZOOKEEPER/package/files/zkSmoke.sh |    78 +
 .../ZOOKEEPER/package/scripts/__init__.py       |    20 +
 .../ZOOKEEPER/package/scripts/params.py         |    86 +
 .../ZOOKEEPER/package/scripts/service_check.py  |    46 +
 .../ZOOKEEPER/package/scripts/status_params.py  |    26 +
 .../ZOOKEEPER/package/scripts/zookeeper.py      |   110 +
 .../package/scripts/zookeeper_client.py         |    42 +
 .../package/scripts/zookeeper_server.py         |    54 +
 .../package/scripts/zookeeper_service.py        |    42 +
 .../package/templates/configuration.xsl.j2      |    42 +
 .../ZOOKEEPER/package/templates/zoo.cfg.j2      |    69 +
 .../templates/zookeeper_client_jaas.conf.j2     |    23 +
 .../package/templates/zookeeper_jaas.conf.j2    |    26 +
 .../PHD/3.0.0.0/services/stack_advisor.py       |   443 +
 ambari-web/app/data/PHD/site_properties.js      |  3725 +++++
 291 files changed, 63716 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index e73cb0f..e03b626 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -170,6 +170,8 @@
             <exclude>src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/balancer-emulator/balancer.log</exclude>
             <exclude>src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/balancer-emulator/balancer.log</exclude>
             <exclude>src/main/resources/stacks/BIGTOP/0.8/services/HDFS/package/scripts/balancer-emulator/balancer-err.log</exclude>
+            <exclude>src/main/resources/stacks/PHD/3.0.0.0/services/HDFS/package/scripts/balancer-emulator/balancer.log</exclude>
+            <exclude>src/main/resources/stacks/PHD/3.0.0.0/services/HDFS/package/scripts/balancer-emulator/balancer-err.log</exclude>
             <exclude>conf/unix/ca.config</exclude>
             <exclude>conf/unix/krb5JAASLogin.conf</exclude>
             <exclude>conf/windows/ca.config</exclude>
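
(The two new excludes mirror the existing HDP and BIGTOP entries above them,
exempting the PHD balancer-emulator log fixtures from the same source-file
checks.)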

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/blueprints/multinode-default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/blueprints/multinode-default.json b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/blueprints/multinode-default.json
new file mode 100644
index 0000000..0c871c2
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/blueprints/multinode-default.json
@@ -0,0 +1,183 @@
+{
+    "Blueprints": {
+        "stack_name": "PHD", 
+        "stack_version": "3.0.0.0", 
+        "blueprint_name": "blueprint-multinode-default"
+    }, 
+    "host_groups": [
+        {
+            "cardinality": "1", 
+            "name": "master_1", 
+            "components": [
+                {
+                    "name": "NAMENODE"
+                }, 
+                {
+                    "name": "ZOOKEEPER_SERVER"
+                }, 
+                {
+                    "name": "HBASE_MASTER"
+                }, 
+                {
+                    "name": "GANGLIA_SERVER"
+                }, 
+                {
+                    "name": "HDFS_CLIENT"
+                }, 
+                {
+                    "name": "YARN_CLIENT"
+                }, 
+                {
+                    "name": "HCAT"
+                }, 
+                {
+                    "name": "GANGLIA_MONITOR"
+                }
+            ]
+        }, 
+        {
+            "cardinality": "1", 
+            "name": "master_2", 
+            "components": [
+                {
+                    "name": "ZOOKEEPER_CLIENT"
+                }, 
+                {
+                    "name": "HISTORYSERVER"
+                }, 
+                {
+                    "name": "HIVE_SERVER"
+                }, 
+                {
+                    "name": "SECONDARY_NAMENODE"
+                }, 
+                {
+                    "name": "HIVE_METASTORE"
+                }, 
+                {
+                    "name": "HDFS_CLIENT"
+                }, 
+                {
+                    "name": "HIVE_CLIENT"
+                }, 
+                {
+                    "name": "YARN_CLIENT"
+                }, 
+                {
+                    "name": "MYSQL_SERVER"
+                }, 
+                {
+                    "name": "POSTGRESQL_SERVER"
+                }, 
+                {
+                    "name": "GANGLIA_MONITOR"
+                }, 
+                {
+                    "name": "WEBHCAT_SERVER"
+                }
+            ]
+        }, 
+        {
+            "cardinality": "1", 
+            "name": "master_3", 
+            "components": [
+                {
+                    "name": "RESOURCEMANAGER"
+                }, 
+                {
+                    "name": "ZOOKEEPER_SERVER"
+                }, 
+                {
+                    "name": "GANGLIA_MONITOR"
+                }
+            ]
+        }, 
+        {
+            "cardinality": "1", 
+            "name": "master_4", 
+            "components": [
+                {
+                    "name": "OOZIE_SERVER"
+                }, 
+                {
+                    "name": "ZOOKEEPER_SERVER"
+                }, 
+                {
+                    "name": "GANGLIA_MONITOR"
+                }
+            ]
+        }, 
+        {
+            "cardinality": "${slavesCount}", 
+            "name": "slave", 
+            "components": [
+                {
+                    "name": "HBASE_REGIONSERVER"
+                }, 
+                {
+                    "name": "NODEMANAGER"
+                }, 
+                {
+                    "name": "DATANODE"
+                }, 
+                {
+                    "name": "GANGLIA_MONITOR"
+                }
+            ]
+        }, 
+        {
+            "cardinality": "1", 
+            "name": "gateway", 
+            "components": [
+                {
+                    "name": "AMBARI_SERVER"
+                }, 
+                {
+                    "name": "NAGIOS_SERVER"
+                }, 
+                {
+                    "name": "ZOOKEEPER_CLIENT"
+                }, 
+                {
+                    "name": "PIG"
+                }, 
+                {
+                    "name": "OOZIE_CLIENT"
+                }, 
+                {
+                    "name": "HBASE_CLIENT"
+                }, 
+                {
+                    "name": "HCAT"
+                }, 
+                {
+                    "name": "SQOOP"
+                }, 
+                {
+                    "name": "HDFS_CLIENT"
+                }, 
+                {
+                    "name": "HIVE_CLIENT"
+                }, 
+                {
+                    "name": "YARN_CLIENT"
+                }, 
+                {
+                    "name": "MAPREDUCE2_CLIENT"
+                }, 
+                {
+                    "name": "GANGLIA_MONITOR"
+                }
+            ]
+        }
+    ], 
+    "configurations": [
+        {
+            "nagios-env": {
+                "nagios_contact": "admin@localhost"
+            }
+        }
+    ]
+}
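
For reference, a blueprint such as the one above has to be registered with
the Ambari REST API before it can drive a cluster install. Below is a
minimal sketch (Python 3, standard library only) assuming an Ambari server
on localhost:8080 with the default admin/admin credentials; the endpoint and
the X-Requested-By header are the standard blueprint API, while the file
name and credentials here are illustrative:

import base64
import urllib.request

def register_blueprint(path, name, base="http://localhost:8080/api/v1"):
    # POST the blueprint JSON under the chosen blueprint name.
    with open(path, "rb") as f:
        body = f.read()
    req = urllib.request.Request("%s/blueprints/%s" % (base, name),
                                 data=body, method="POST")
    req.add_header("X-Requested-By", "ambari")  # required by the Ambari API
    auth = base64.b64encode(b"admin:admin").decode()  # assumed credentials
    req.add_header("Authorization", "Basic " + auth)
    return urllib.request.urlopen(req).getcode()

# e.g. register_blueprint("multinode-default.json",
#                         "blueprint-multinode-default")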

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/blueprints/singlenode-default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/blueprints/singlenode-default.json b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/blueprints/singlenode-default.json
new file mode 100644
index 0000000..9e4881a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/blueprints/singlenode-default.json
@@ -0,0 +1,137 @@
+{
+    "Blueprints": {
+        "stack_name": "PHD", 
+        "stack_version": "3.0.0.0", 
+        "blueprint_name": "blueprint-singlenode-default"
+    }, 
+    "host_groups": [
+        {
+            "cardinality": "1", 
+            "name": "host_group_1", 
+            "components": [
+                {
+                    "name": "STORM_REST_API"
+                }, 
+                {
+                    "name": "PIG"
+                }, 
+                {
+                    "name": "HISTORYSERVER"
+                }, 
+                {
+                    "name": "HBASE_REGIONSERVER"
+                }, 
+                {
+                    "name": "OOZIE_CLIENT"
+                }, 
+                {
+                    "name": "HBASE_CLIENT"
+                }, 
+                {
+                    "name": "NAMENODE"
+                }, 
+                {
+                    "name": "SUPERVISOR"
+                }, 
+                {
+                    "name": "FALCON_SERVER"
+                }, 
+                {
+                    "name": "HCAT"
+                }, 
+                {
+                    "name": "AMBARI_SERVER"
+                }, 
+                {
+                    "name": "APP_TIMELINE_SERVER"
+                }, 
+                {
+                    "name": "HDFS_CLIENT"
+                }, 
+                {
+                    "name": "HIVE_CLIENT"
+                }, 
+                {
+                    "name": "NODEMANAGER"
+                }, 
+                {
+                    "name": "DATANODE"
+                }, 
+                {
+                    "name": "WEBHCAT_SERVER"
+                }, 
+                {
+                    "name": "RESOURCEMANAGER"
+                }, 
+                {
+                    "name": "ZOOKEEPER_SERVER"
+                }, 
+                {
+                    "name": "ZOOKEEPER_CLIENT"
+                }, 
+                {
+                    "name": "STORM_UI_SERVER"
+                }, 
+                {
+                    "name": "HBASE_MASTER"
+                }, 
+                {
+                    "name": "HIVE_SERVER"
+                }, 
+                {
+                    "name": "OOZIE_SERVER"
+                }, 
+                {
+                    "name": "FALCON_CLIENT"
+                }, 
+                {
+                    "name": "NAGIOS_SERVER"
+                }, 
+                {
+                    "name": "SECONDARY_NAMENODE"
+                }, 
+                {
+                    "name": "TEZ_CLIENT"
+                }, 
+                {
+                    "name": "HIVE_METASTORE"
+                }, 
+                {
+                    "name": "GANGLIA_SERVER"
+                }, 
+                {
+                    "name": "SQOOP"
+                }, 
+                {
+                    "name": "YARN_CLIENT"
+                }, 
+                {
+                    "name": "MAPREDUCE2_CLIENT"
+                }, 
+                {
+                    "name": "MYSQL_SERVER"
+                }, 
+                {
+                    "name": "POSTGRESQL_SERVER"
+                }, 
+                {
+                    "name": "GANGLIA_MONITOR"
+                }, 
+                {
+                    "name": "DRPC_SERVER"
+                }, 
+                {
+                    "name": "NIMBUS"
+                }
+            ]
+        }
+    ], 
+    "configurations": [
+        {
+            "nagios-env": {
+                "nagios_contact": "admin@localhost"
+            }
+        }
+    ]
+}
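
Once registered, a cluster is created by POSTing a creation template that
maps each host_group in the blueprint to concrete hosts. A hypothetical
payload for the single-node blueprint above (the fqdn and password are
placeholders), expressed as a Python dict ready for json.dumps():

# Sent as the body of POST /api/v1/clusters/<clusterName>;
# "blueprint" must match the blueprint_name registered above.
cluster_template = {
    "blueprint": "blueprint-singlenode-default",
    "default_password": "changeme",
    "host_groups": [
        {"name": "host_group_1", "hosts": [{"fqdn": "node1.example.com"}]},
    ],
}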

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/configuration/cluster-env.xml
new file mode 100644
index 0000000..d41ff98
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/configuration/cluster-env.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+
+<configuration>
+    <property>
+        <name>security_enabled</name>
+        <value>false</value>
+        <description>Hadoop Security</description>
+    </property>
+    <property>
+        <name>kerberos_domain</name>
+        <value>EXAMPLE.COM</value>
+        <description>Kerberos realm.</description>
+    </property>
+    <property>
+        <name>ignore_groupsusers_create</name>
+        <value>false</value>
+        <description>Whether to ignore failures on users and group creation</description>
+    </property>
+    <property>
+        <name>smokeuser</name>
+        <value>ambari-qa</value>
+        <property-type>USER</property-type>
+        <description>User executing service checks</description>
+    </property>
+    <property>
+        <name>smokeuser_keytab</name>
+        <value>/etc/security/keytabs/smokeuser.headless.keytab</value>
+        <description>Path to smoke test user keytab file</description>
+    </property>
+    <property>
+        <name>user_group</name>
+        <value>hadoop</value>
+        <property-type>GROUP</property-type>
+        <description>Hadoop user group.</description>
+    </property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/hook.py
new file mode 100644
index 0000000..16fe7dd
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/hook.py
@@ -0,0 +1,35 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from shared_initialization import *
+
+# Hook for hosts that carry only client components and no other services
+class AfterInstallHook(Hook):
+
+  def hook(self, env):
+    import params
+
+    env.set_params(params)
+    setup_phd_install_directory()
+    setup_config()
+
+if __name__ == "__main__":
+  AfterInstallHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/params.py
new file mode 100644
index 0000000..e2c47bc
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/params.py
@@ -0,0 +1,73 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from resource_management.core.system import System
+
+config = Script.get_config()
+
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
+#hadoop params
+if rpm_version:
+  mapreduce_libs_path = "/usr/phd/current/hadoop-mapreduce-client/*"
+  hadoop_libexec_dir = "/usr/phd/current/hadoop-client/libexec"
+else:
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+versioned_phd_root = '/usr/phd/current'
+#security params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+#java params
+java_home = config['hostLevelParams']['java_home']
+#hadoop params
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
+
+if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+  # deprecated rhel jsvc_path
+  jsvc_path = "/usr/libexec/phd-utils"
+else:
+  jsvc_path = "/usr/lib/phd-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize =  "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+#users and groups
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+user_group = config['configurations']['cluster-env']['user_group']
+
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+has_namenode = not len(namenode_host) == 0
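
A note on the default() helper used above: it resolves a "/"-separated path
against the nested configuration dict and returns the fallback as soon as a
segment is missing (rpm_version, for instance, is absent on non-versioned
installs). A rough, illustrative equivalent, not the actual
resource_management implementation:

def default(path, fallback, config):
    # Walk "/configurations/cluster-env/rpm_version"-style paths through
    # the nested command dict; bail out with the fallback on a missing key.
    node = config
    for key in path.strip("/").split("/"):
        if not isinstance(node, dict) or key not in node:
            return fallback
        node = node[key]
    return node

# default("/configurations/cluster-env/rpm_version", None, config)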

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/shared_initialization.py
new file mode 100644
index 0000000..2745606
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -0,0 +1,38 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import os
+from resource_management import *
+
+def setup_phd_install_directory():
+  import params
+  if params.rpm_version:
+    Execute(format('ambari-python-wrap /usr/bin/phd-select set all `ambari-python-wrap /usr/bin/phd-select versions | grep ^{rpm_version}- | tail -1`'),
+            only_if=format('ls -d /usr/phd/{rpm_version}-*')
+    )
+
+def setup_config():
+  import params
+  if params.has_namenode:
+    XmlConfig("core-site.xml",
+              conf_dir=params.hadoop_conf_dir,
+              configurations=params.config['configurations']['core-site'],
+              configuration_attributes=params.config['configuration_attributes']['core-site'],
+              owner=params.hdfs_user,
+              group=params.user_group
+    )
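
The Execute in setup_phd_install_directory() pins all components to the
newest installed build of the requested version: the output of "phd-select
versions" is filtered to lines starting with {rpm_version}- and the last one
in sort order wins, with only_if skipping the whole step when no matching
/usr/phd/<rpm_version>-* directory exists. A hedged Python equivalent of
just the version pick (globbing the install root is an assumption):

import glob

def latest_phd_build(rpm_version):
    # Roughly `phd-select versions | grep ^<rpm_version>- | tail -1`:
    # lexicographically last matching build, or None when none installed.
    builds = sorted(glob.glob("/usr/phd/%s-*" % rpm_version))
    return builds[-1] if builds else None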

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/files/changeToSecureUid.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/files/changeToSecureUid.sh b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/files/changeToSecureUid.sh
new file mode 100644
index 0000000..154c1c0
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/files/changeToSecureUid.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+username=$1
+directories=$2
+
+function find_available_uid() {
+ newUid=0
+ for ((i=1001; i<=2000; i++))
+ do
+   grep -q $i /etc/passwd
+   if [ "$?" -ne 0 ]
+   then
+    newUid=$i
+    break
+   fi
+ done
+}
+
+find_available_uid
+
+if [ $newUid -eq 0 ]
+then
+  echo "Failed to find Uid between 1000 and 2000"
+  exit 1
+fi
+
+dir_array=($(echo $directories | sed 's/,/\n/g'))
+old_uid=$(id -u $username)
+echo "Changing uid of $username from $old_uid to $newUid"
+echo "Changing directory permisions for ${dir_array[@]}"
+usermod -u $newUid $username && for dir in ${dir_array[@]} ; do chown -Rh $newUid $dir ; done
+exit 0
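
For comparison, a hedged Python equivalent of find_available_uid; scanning
real uids through the pwd module avoids the false positives the grep above
can hit when the candidate number merely appears somewhere else in
/etc/passwd (a gid, or part of a home path):

import pwd

def find_available_uid(start=1001, end=2000):
    # Collect the uids actually in use, then return the first free one.
    used = {entry.pw_uid for entry in pwd.getpwall()}
    for uid in range(start, end + 1):
        if uid not in used:
            return uid
    return 0  # callers treat 0 as "none found", as the shell script does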

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/hook.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/hook.py
new file mode 100644
index 0000000..1fd36d6
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/hook.py
@@ -0,0 +1,35 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from shared_initialization import *
+
+class BeforeAnyHook(Hook):
+
+  def hook(self, env):
+    import params
+    env.set_params(params)
+    
+    setup_jce()
+    setup_users()
+    setup_hadoop_env()
+
+if __name__ == "__main__":
+  BeforeAnyHook().execute()
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/params.py
new file mode 100644
index 0000000..91e3008
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/params.py
@@ -0,0 +1,134 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from resource_management.core.system import System
+import collections
+import json
+
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
+jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
+jce_location = config['hostLevelParams']['jdk_location']
+jdk_name = default("/hostLevelParams/jdk_name", None)
+java_home = config['hostLevelParams']['java_home']
+
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
+#hadoop params
+if rpm_version:
+  mapreduce_libs_path = "/usr/phd/current/hadoop-mapreduce-client/*"
+  hadoop_libexec_dir = "/usr/phd/current/hadoop-client/libexec"
+  hadoop_home = "/usr/phd/current/hadoop-client"
+else:
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+  hadoop_home = "/usr/lib/hadoop"
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
+versioned_phd_root = '/usr/phd/current'
+#security params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+#hadoop params
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+hadoop_root_logger = config['configurations']['hadoop-env']['hadoop_root_logger']
+
+if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+  # deprecated rhel jsvc_path
+  jsvc_path = "/usr/libexec/phd-utils"
+else:
+  jsvc_path = "/usr/lib/phd-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize =  "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+hadoop_env_sh_template = config['configurations']['hadoop-env']['content']
+
+#users and groups
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hbase_user = config['configurations']['hbase-env']['hbase_user']
+nagios_user = config['configurations']['nagios-env']['nagios_user']
+smoke_user =  config['configurations']['cluster-env']['smokeuser']
+gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
+gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+tez_user = config['configurations']['tez-env']["tez_user"]
+oozie_user = config['configurations']['oozie-env']["oozie_user"]
+
+user_group = config['configurations']['cluster-env']['user_group']
+
+nagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])
+ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+oozie_servers = default("/clusterHostInfo/oozie_server", [])
+
+has_namenode = not len(namenode_host) == 0
+has_nagios = not len(nagios_server_hosts) == 0
+has_ganglia_server = not len(ganglia_server_hosts) == 0
+has_tez = 'tez-site' in config['configurations']
+has_hbase_masters = not len(hbase_master_hosts) == 0
+has_oozie_server = not len(oozie_servers) == 0
+
+hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
+
+proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
+nagios_group = config['configurations']['nagios-env']['nagios_group']
+
+ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
+
+smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+if has_hbase_masters:
+  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
+#repo params
+repo_info = config['hostLevelParams']['repo_info']
+service_repo_info = default("/hostLevelParams/service_repo_info",None)
+
+user_to_groups_dict = collections.defaultdict(lambda:[user_group])
+user_to_groups_dict[smoke_user] = [proxyuser_group]
+if has_ganglia_server:
+  user_to_groups_dict[gmond_user] = [gmond_user]
+  user_to_groups_dict[gmetad_user] = [gmetad_user]
+if has_tez:
+  user_to_groups_dict[tez_user] = [proxyuser_group]
+if has_oozie_server:
+  user_to_groups_dict[oozie_user] = [proxyuser_group]
+
+user_to_gid_dict = collections.defaultdict(lambda:user_group)
+if has_nagios:
+  user_to_gid_dict[nagios_user] = nagios_group
+
+user_list = json.loads(config['hostLevelParams']['user_list'])
+group_list = json.loads(config['hostLevelParams']['group_list'])
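
The two defaultdicts above give setup_users() a uniform lookup: any user
without an explicit entry inherits user_group, while the smoke, gmond,
gmetad, tez, and oozie users get their own groups. A small standalone
illustration (the values are examples only):

import collections

user_group = "hadoop"                            # example value
proxyuser_group = "users"                        # example value
user_to_groups_dict = collections.defaultdict(lambda: [user_group])
user_to_groups_dict["ambari-qa"] = [proxyuser_group]

print(user_to_groups_dict["hdfs"])               # ['hadoop']  (defaulted)
print(user_to_groups_dict["ambari-qa"])          # ['users']   (explicit)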

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/shared_initialization.py
new file mode 100644
index 0000000..126b8bb
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-ANY/scripts/shared_initialization.py
@@ -0,0 +1,114 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+
+from resource_management import *
+
+
+
+def setup_jce():
+  import params
+  
+  if not params.jdk_name:
+    return
+  
+  environment = {
+    "no_proxy": format("{ambari_server_hostname}")
+  }
+  
+  if params.jce_policy_zip is not None:
+    jce_curl_target = format("{artifact_dir}/{jce_policy_zip}")
+    download_jce = format("mkdir -p {artifact_dir}; \
+    curl -kf -x \"\" --retry 10 \
+    {jce_location}/{jce_policy_zip} -o {jce_curl_target}")
+    Execute( download_jce,
+             path = ["/bin","/usr/bin/"],
+             not_if =format("test -e {jce_curl_target}"),
+             ignore_failures = True,
+             environment = environment
+    )
+  elif params.security_enabled:
+    # Something weird is happening
+    raise Fail("Security is enabled, but JCE policy zip is not specified.")
+  
+  if params.security_enabled:
+    security_dir = format("{java_home}/jre/lib/security")
+    extract_cmd = format("rm -f local_policy.jar; rm -f US_export_policy.jar; unzip -o -j -q {jce_curl_target}")
+    Execute(extract_cmd,
+            only_if = format("test -e {security_dir} && test -f {jce_curl_target}"),
+            cwd  = security_dir,
+            path = ['/bin/','/usr/bin']
+    )
+
+def setup_users():
+  """
+  Creates users before cluster installation
+  """
+  import params
+  
+  for group in params.group_list:
+    Group(group,
+        ignore_failures = params.ignore_groupsusers_create
+    )
+    
+  for user in params.user_list:
+    User(user,
+        gid = params.user_to_gid_dict[user],
+        groups = params.user_to_groups_dict[user],
+        ignore_failures = params.ignore_groupsusers_create       
+    )
+           
+  set_uid(params.smoke_user, params.smoke_user_dirs)
+
+  if params.has_hbase_masters:
+    set_uid(params.hbase_user, params.hbase_user_dirs)
+    
+def set_uid(user, user_dirs):
+  """
+  user_dirs - comma separated directories
+  """
+  import params
+
+  File(format("{tmp_dir}/changeUid.sh"),
+       content=StaticFile("changeToSecureUid.sh"),
+       mode=0555)
+  Execute(format("{tmp_dir}/changeUid.sh {user} {user_dirs} 2>/dev/null"),
+          not_if = format("test $(id -u {user}) -gt 1000"))
+    
+def setup_hadoop_env():
+  import params
+  if params.has_namenode:
+    if params.security_enabled:
+      tc_owner = "root"
+    else:
+      tc_owner = params.hdfs_user
+    Directory(params.hadoop_conf_empty_dir,
+              recursive=True,
+              owner='root',
+              group='root'
+    )
+    Link(params.hadoop_conf_dir,
+         to=params.hadoop_conf_empty_dir,
+         not_if=format("ls {hadoop_conf_dir}")
+    )
+    File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+         owner=tc_owner,
+         content=InlineTemplate(params.hadoop_env_sh_template)
+    )
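
Worth noting in set_uid(): the Execute is guarded by
not_if = "test $(id -u {user}) -gt 1000", so the uid change only ever runs
for users whose current uid is still in the system range. An illustrative
pwd-based version of that check:

import pwd

def needs_secure_uid(user, threshold=1000):
    # True when the user's uid is at or below the threshold and the
    # changeToSecureUid.sh step above would therefore run.
    return pwd.getpwnam(user).pw_uid <= threshold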

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/hook.py
new file mode 100644
index 0000000..61fba18
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/hook.py
@@ -0,0 +1,38 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from shared_initialization import *
+from repo_initialization import *
+
+class BeforeInstallHook(Hook):
+
+  def hook(self, env):
+    import params
+
+    self.run_custom_hook('before-ANY')
+    env.set_params(params)
+    
+    install_repos()
+    install_packages()
+    setup_java()
+
+if __name__ == "__main__":
+  BeforeInstallHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/params.py
new file mode 100644
index 0000000..a8ad47e
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/params.py
@@ -0,0 +1,122 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from resource_management.core.system import System
+import json
+import collections
+
+config = Script.get_config()
+tmp_dir = Script.get_tmp_dir()
+
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
+#users and groups
+hbase_user = config['configurations']['hbase-env']['hbase_user']
+nagios_user = config['configurations']['nagios-env']['nagios_user']
+smoke_user =  config['configurations']['cluster-env']['smokeuser']
+gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
+gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+tez_user = config['configurations']['tez-env']["tez_user"]
+
+user_group = config['configurations']['cluster-env']['user_group']
+proxyuser_group = default("/configurations/hadoop-env/proxyuser_group","users")
+nagios_group = config['configurations']['nagios-env']['nagios_group']
+
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+
+#hosts
+hostname = config["hostname"]
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+rm_host = default("/clusterHostInfo/rm_host", [])
+slave_hosts = default("/clusterHostInfo/slave_hosts", [])
+nagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])
+oozie_servers = default("/clusterHostInfo/oozie_server", [])
+hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", [])
+hive_server_host =  default("/clusterHostInfo/hive_server_host", [])
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+hs_host = default("/clusterHostInfo/hs_host", [])
+jtnode_host = default("/clusterHostInfo/jtnode_host", [])
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
+ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
+storm_server_hosts = default("/clusterHostInfo/nimbus_hosts", [])
+falcon_host =  default('/clusterHostInfo/falcon_server_hosts', [])
+
+has_sqoop_client = 'sqoop-env' in config['configurations']
+has_namenode = not len(namenode_host) == 0
+has_hs = not len(hs_host) == 0
+has_resourcemanager = not len(rm_host) == 0
+has_slaves = not len(slave_hosts) == 0
+has_nagios = not len(nagios_server_hosts) == 0
+has_oozie_server = not len(oozie_servers)  == 0
+has_hcat_server_host = not len(hcat_server_hosts)  == 0
+has_hive_server_host = not len(hive_server_host)  == 0
+has_hbase_masters = not len(hbase_master_hosts) == 0
+has_zk_host = not len(zk_hosts) == 0
+has_ganglia_server = not len(ganglia_server_hosts) == 0
+has_storm_server = not len(storm_server_hosts) == 0
+has_falcon_server = not len(falcon_host) == 0
+has_tez = 'tez-site' in config['configurations']
+
+is_namenode_master = hostname in namenode_host
+is_jtnode_master = hostname in jtnode_host
+is_rmnode_master = hostname in rm_host
+is_hsnode_master = hostname in hs_host
+is_hbase_master = hostname in hbase_master_hosts
+is_slave = hostname in slave_hosts
+if has_ganglia_server:
+  ganglia_server_host = ganglia_server_hosts[0]
+
+hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
+
+#security params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+#java params
+java_home = config['hostLevelParams']['java_home']
+artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
+jdk_name = default("/hostLevelParams/jdk_name", None) # None when jdk is already installed by user
+jce_policy_zip = default("/hostLevelParams/jce_name", None) # None when jdk is already installed by user
+jce_location = config['hostLevelParams']['jdk_location']
+jdk_location = config['hostLevelParams']['jdk_location']
+ignore_groupsusers_create = default("/configurations/cluster-env/ignore_groupsusers_create", False)
+
+smoke_user_dirs = format("/tmp/hadoop-{smoke_user},/tmp/hsperfdata_{smoke_user},/home/{smoke_user},/tmp/{smoke_user},/tmp/sqoop-{smoke_user}")
+if has_hbase_masters:
+  hbase_user_dirs = format("/home/{hbase_user},/tmp/{hbase_user},/usr/bin/{hbase_user},/var/log/{hbase_user},{hbase_tmp_dir}")
+#repo params
+repo_info = config['hostLevelParams']['repo_info']
+service_repo_info = default("/hostLevelParams/service_repo_info",None)
+
+user_to_groups_dict = collections.defaultdict(lambda:[user_group])
+user_to_groups_dict[smoke_user] = [proxyuser_group]
+if has_ganglia_server:
+  user_to_groups_dict[gmond_user] = [gmond_user]
+  user_to_groups_dict[gmetad_user] = [gmetad_user]
+if has_tez:
+  user_to_groups_dict[tez_user] = [proxyuser_group]
+
+user_to_gid_dict = collections.defaultdict(lambda:user_group)
+if has_nagios:
+  user_to_gid_dict[nagios_user] = nagios_group
+
+user_list = json.loads(config['hostLevelParams']['user_list'])
+group_list = json.loads(config['hostLevelParams']['group_list'])
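
The two defaultdicts above give user-to-group lookups a safe fallback: any user
without an explicit entry resolves to [user_group] (or to user_group in the gid
map). A minimal standalone sketch of that fallback behavior, with hypothetical
user and group names:

import collections

# Hypothetical stand-ins for the cluster-env values resolved above.
user_group = "hadoop"
proxyuser_group = "users"
smoke_user = "ambari-qa"

# Any user without an explicit entry falls back to [user_group].
user_to_groups_dict = collections.defaultdict(lambda: [user_group])
user_to_groups_dict[smoke_user] = [proxyuser_group]

print(user_to_groups_dict["hdfs"])      # ['hadoop'] (default)
print(user_to_groups_dict[smoke_user])  # ['users']  (explicit override)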

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/repo_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/repo_initialization.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/repo_initialization.py
new file mode 100644
index 0000000..39a59cd
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/repo_initialization.py
@@ -0,0 +1,57 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+from resource_management import *
+import json
+
+# components_list = repoName + postfix
+_UBUNTU_REPO_COMPONENTS_POSTFIX = ["main"]
+
+def _alter_repo(action, repo_string, repo_template):
+  """
+  @param action: "delete" or "create"
+  @param repo_string: e.g. "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/PHD/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"PHD-2.0._\",\"repoName\":\"PHD\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/PHD/centos6/2.x/updates/2.0.6.0\"}]"
+  @param repo_template: filename of the Jinja2 template used to render the repository file
+  """
+  repo_dicts = json.loads(repo_string)
+
+  if not isinstance(repo_dicts, list):
+    repo_dicts = [repo_dicts]
+
+  for repo in repo_dicts:
+    if 'baseUrl' not in repo:
+      repo['baseUrl'] = None
+    if 'mirrorsList' not in repo:
+      repo['mirrorsList'] = None
+    
+    ubuntu_components = [ repo['repoName'] ] + _UBUNTU_REPO_COMPONENTS_POSTFIX
+    
+    Repository(repo['repoId'],
+               action = action,
+               base_url = repo['baseUrl'],
+               mirror_list = repo['mirrorsList'],
+               repo_file_name = repo['repoName'],
+               repo_template = repo_template,
+               components = ubuntu_components, # ubuntu specific
+    )
+
+def install_repos():
+  import params
+  template = "repo_suse_rhel.j2" if System.get_instance().os_family in ["suse", "redhat"] else "repo_ubuntu.j2"
+  _alter_repo("create", params.repo_info, template)
+  if params.service_repo_info:
+    _alter_repo("create", params.service_repo_info, template)
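
_alter_repo accepts the repo descriptor either as a single JSON object or as a
list of them. A small sketch of the normalization it performs before declaring
each Repository resource, using a hypothetical repo_info payload:

import json

# Hypothetical payload of the shape the docstring above describes.
repo_string = ('[{"repoId": "PHD-3.0", "repoName": "PHD",'
               ' "baseUrl": "http://example.com/PHD/centos6/3.x"}]')

repo_dicts = json.loads(repo_string)
if not isinstance(repo_dicts, list):  # a bare JSON object gets wrapped in a list
  repo_dicts = [repo_dicts]

for repo in repo_dicts:
  repo.setdefault('baseUrl', None)      # same effect as the membership checks above
  repo.setdefault('mirrorsList', None)
  print("%s base_url=%s mirror_list=%s"
        % (repo['repoId'], repo['baseUrl'], repo['mirrorsList']))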

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/shared_initialization.py
new file mode 100644
index 0000000..1b2559b
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/scripts/shared_initialization.py
@@ -0,0 +1,63 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import os
+
+from resource_management import *
+
+def setup_java():
+  """
+  Installs the JDK using parameters that come from ambari-server.
+  """
+  import params
+
+  if not params.jdk_name:
+    return
+
+  jdk_curl_target = format("{artifact_dir}/{jdk_name}")
+  java_dir = os.path.dirname(params.java_home)
+  java_exec = format("{java_home}/bin/java")
+
+  environment = {
+    "no_proxy": format("{ambari_server_hostname}")
+  }
+
+  Execute(format("mkdir -p {artifact_dir} ; \
+  curl -kf -x \"\" \
+  --retry 10 {jdk_location}/{jdk_name} -o {jdk_curl_target}"),
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}"),
+          environment = environment)
+
+  if params.jdk_name.endswith(".bin"):
+    install_cmd = format("mkdir -p {java_dir} ; chmod +x {jdk_curl_target}; cd {java_dir} ; echo A | {jdk_curl_target} -noregister > /dev/null 2>&1")
+  elif params.jdk_name.endswith(".gz"):
+    install_cmd = format("mkdir -p {java_dir} ; cd {java_dir} ; tar -xf {jdk_curl_target} > /dev/null 2>&1")
+  else:
+    raise Fail(format("Unrecognized JDK archive extension in {jdk_name}; expected .bin or .gz"))
+
+  Execute(install_cmd,
+          path = ["/bin","/usr/bin/"],
+          not_if = format("test -e {java_exec}")
+  )
+
+def install_packages():
+  import params
+  packages = ['unzip', 'curl']
+  if params.rpm_version:
+    packages.append('phd-select')
+  Package(packages)
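
The not_if guards are what make setup_java safe to re-run: both Execute calls
are skipped once {java_home}/bin/java exists. A rough standalone approximation
of that guard outside the resource_management framework (the path below is
hypothetical):

import subprocess

java_exec = "/usr/jdk64/jdk1.7.0_67/bin/java"  # hypothetical {java_home}/bin/java

def execute(cmd, not_if=None):
  # Rough stand-in for Execute(..., not_if=...): skip cmd when the guard succeeds.
  if not_if and subprocess.call(not_if, shell=True) == 0:
    return
  subprocess.check_call(cmd, shell=True)

# Re-running is harmless: once java_exec exists, the guard short-circuits the install.
execute("echo installing jdk", not_if="test -e %s" % java_exec)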

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/templates/repo_suse_rhel.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/templates/repo_suse_rhel.j2 b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/templates/repo_suse_rhel.j2
new file mode 100644
index 0000000..d486f89
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/templates/repo_suse_rhel.j2
@@ -0,0 +1,7 @@
+[{{repo_id}}]
+name={{repo_file_name}}
+{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}
+
+path=/
+enabled=1
+gpgcheck=0
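
To preview what this template renders to, it can be fed sample values through
plain Jinja2; the Repository resource supplies the real ones via repo_template.
The repo id and URL below are hypothetical:

from jinja2 import Template

repo_template = """[{{repo_id}}]
name={{repo_file_name}}
{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}

path=/
enabled=1
gpgcheck=0"""

print(Template(repo_template).render(repo_id="PHD-3.0", repo_file_name="PHD",
                                     mirror_list=None,
                                     base_url="http://example.com/PHD/centos6/3.x"))

repo_ubuntu.j2 below works the same way, rendering a single apt source line from
package_type, base_url and the components list.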

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/templates/repo_ubuntu.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/templates/repo_ubuntu.j2 b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/templates/repo_ubuntu.j2
new file mode 100644
index 0000000..52d4c9a
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-INSTALL/templates/repo_ubuntu.j2
@@ -0,0 +1 @@
+{{package_type}} {{base_url}} {{components}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-RESTART/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-RESTART/scripts/hook.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-RESTART/scripts/hook.py
new file mode 100644
index 0000000..14b9d99
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-RESTART/scripts/hook.py
@@ -0,0 +1,29 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+class BeforeRestartHook(Hook):
+
+  def hook(self, env):
+    self.run_custom_hook('before-START')
+
+if __name__ == "__main__":
+  BeforeRestartHook().execute()
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/files/checkForFormat.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/files/checkForFormat.sh b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/files/checkForFormat.sh
new file mode 100644
index 0000000..82dbda1
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/files/checkForFormat.sh
@@ -0,0 +1,65 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+export hdfs_user=$1
+shift
+export conf_dir=$1
+shift
+export bin_dir=$1
+shift
+export mark_dir=$1
+shift
+export name_dirs=$*
+
+export EXIT_CODE=0
+export command="namenode -format"
+export list_of_non_empty_dirs=""
+
+mark_file=/var/run/hadoop/hdfs/namenode-formatted
+if [[ -f ${mark_file} ]] ; then
+  rm -f ${mark_file}
+  mkdir -p ${mark_dir}
+fi
+
+if [[ ! -d $mark_dir ]] ; then
+  for dir in `echo $name_dirs | tr ',' ' '` ; do
+    echo "NameNode Dirname = $dir"
+    # a non-empty name dir means we must not format
+    if [[ -n "$(ls "$dir" 2>/dev/null)" ]] ; then
+      (( EXIT_CODE = $EXIT_CODE + 1 ))
+      list_of_non_empty_dirs="$list_of_non_empty_dirs $dir"
+    fi
+  done
+
+  if [[ $EXIT_CODE == 0 ]] ; then
+    export PATH=$PATH:$bin_dir
+    su -s /bin/bash - ${hdfs_user} -c "yes Y | hadoop --config ${conf_dir} ${command}"
+  else
+      echo "ERROR: One or more NameNode directories are not empty. Will not format the NameNode. Non-empty directories:${list_of_non_empty_dirs}"
+  fi
+else
+  echo "${mark_dir} exists. Namenode DFS already formatted"
+fi
+
+exit $EXIT_CODE
+
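
The script formats the NameNode only when the marker directory is absent and
every configured name directory is empty; the same decision logic, sketched in
Python with hypothetical paths:

import os

def should_format(mark_dir, name_dirs):
  # Never reformat once the marker dir exists; otherwise format only when
  # every configured name dir is empty (or not yet created).
  if os.path.isdir(mark_dir):
    return False
  non_empty = [d for d in name_dirs.split(",")
               if os.path.isdir(d) and os.listdir(d)]
  return len(non_empty) == 0

print(should_format("/var/lib/hdfs/namenode/formatted",
                    "/hadoop/hdfs/namenode1,/hadoop/hdfs/namenode2"))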

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/files/task-log4j.properties
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/files/task-log4j.properties b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/files/task-log4j.properties
new file mode 100644
index 0000000..7e12962
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/files/task-log4j.properties
@@ -0,0 +1,134 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=INFO,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+#
+# Job Summary Appender 
+#
+# Use following logger to send summary to separate file defined by 
+# hadoop.mapreduce.jobsummary.log.file rolled daily:
+# hadoop.mapreduce.jobsummary.logger=INFO,JSA
+# 
+hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
+hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
+
+# Define the root logger to the system property "hadoop.root.logger".
+log4j.rootLogger=${hadoop.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshold=ALL
+
+#
+# Daily Rolling File Appender
+#
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Roll over at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+#
+# TaskLog Appender
+#
+
+#Default values
+hadoop.tasklog.taskid=null
+hadoop.tasklog.iscleanup=false
+hadoop.tasklog.noKeepSplits=4
+hadoop.tasklog.totalLogFileSize=100
+hadoop.tasklog.purgeLogSplits=true
+hadoop.tasklog.logsRetainHours=12
+
+log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
+log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
+log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
+
+log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
+log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+# Rolling File Appender
+#
+
+#log4j.appender.RFA=org.apache.log4j.RollingFileAppender
+#log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
+
+# Logfile size and 30-day backups
+#log4j.appender.RFA.MaxFileSize=1MB
+#log4j.appender.RFA.MaxBackupIndex=30
+
+#log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
+#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+
+# Custom Logging levels
+
+hadoop.metrics.log.level=INFO
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+log4j.logger.org.apache.hadoop.metrics2=${hadoop.metrics.log.level}
+
+# Jets3t library
+log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
+
+#
+# Null Appender
+# Trap security logger on the hadoop client side
+#
+log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
+ 
+# Removes "deprecated" messages
+log4j.logger.org.apache.hadoop.conf.Configuration.deprecation=WARN

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/scripts/hook.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/scripts/hook.py
new file mode 100644
index 0000000..c90a55c
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/scripts/hook.py
@@ -0,0 +1,37 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from shared_initialization import *
+
+class BeforeStartHook(Hook):
+
+  def hook(self, env):
+    import params
+
+    self.run_custom_hook('before-ANY')
+    env.set_params(params)
+
+    setup_hadoop()
+    setup_configs()
+    create_javahome_symlink()
+
+if __name__ == "__main__":
+  BeforeStartHook().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/e7d07030/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/scripts/params.py
new file mode 100644
index 0000000..96cd6d8
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/PHD/3.0.0.0/hooks/before-START/scripts/params.py
@@ -0,0 +1,158 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+from resource_management.core.system import System
+import os
+
+config = Script.get_config()
+
+#RPM versioning support
+rpm_version = default("/configurations/cluster-env/rpm_version", None)
+
+#hadoop params
+if rpm_version:
+  mapreduce_libs_path = "/usr/phd/current/hadoop-mapreduce-client/*"
+  hadoop_libexec_dir = "/usr/phd/current/hadoop-client/libexec"
+  hadoop_lib_home = "/usr/phd/current/hadoop-client/lib"
+  hadoop_bin = "/usr/phd/current/hadoop-client/sbin"
+  hadoop_home = '/usr/phd/current/hadoop-client'
+else:
+  mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
+  hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
+  hadoop_lib_home = "/usr/lib/hadoop/lib"
+  hadoop_bin = "/usr/lib/hadoop/sbin"
+  hadoop_home = '/usr'
+
+hadoop_conf_dir = "/etc/hadoop/conf"
+#security params
+security_enabled = config['configurations']['cluster-env']['security_enabled']
+
+#users and groups
+mapred_user = config['configurations']['mapred-env']['mapred_user']
+hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+yarn_user = config['configurations']['yarn-env']['yarn_user']
+
+user_group = config['configurations']['cluster-env']['user_group']
+
+#hosts
+hostname = config["hostname"]
+ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
+rm_host = default("/clusterHostInfo/rm_host", [])
+slave_hosts = default("/clusterHostInfo/slave_hosts", [])
+nagios_server_hosts = default("/clusterHostInfo/nagios_server_host", [])
+oozie_servers = default("/clusterHostInfo/oozie_server", [])
+hcat_server_hosts = default("/clusterHostInfo/webhcat_server_host", [])
+hive_server_host =  default("/clusterHostInfo/hive_server_host", [])
+hbase_master_hosts = default("/clusterHostInfo/hbase_master_hosts", [])
+hs_host = default("/clusterHostInfo/hs_host", [])
+jtnode_host = default("/clusterHostInfo/jtnode_host", [])
+namenode_host = default("/clusterHostInfo/namenode_host", [])
+zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
+ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
+
+has_namenode = not len(namenode_host) == 0
+has_resourcemanager = not len(rm_host) == 0
+has_slaves = not len(slave_hosts) == 0
+has_nagios = not len(nagios_server_hosts) == 0
+has_oozie_server = not len(oozie_servers)  == 0
+has_hcat_server_host = not len(hcat_server_hosts)  == 0
+has_hive_server_host = not len(hive_server_host)  == 0
+has_hbase_masters = not len(hbase_master_hosts) == 0
+has_zk_host = not len(zk_hosts) == 0
+has_ganglia_server = not len(ganglia_server_hosts) == 0
+
+is_namenode_master = hostname in namenode_host
+is_jtnode_master = hostname in jtnode_host
+is_rmnode_master = hostname in rm_host
+is_hsnode_master = hostname in hs_host
+is_hbase_master = hostname in hbase_master_hosts
+is_slave = hostname in slave_hosts
+if has_ganglia_server:
+  ganglia_server_host = ganglia_server_hosts[0]
+#hadoop params
+
+if has_namenode:
+  hadoop_tmp_dir = format("/tmp/hadoop-{hdfs_user}")
+hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
+
+task_log4j_properties_location = os.path.join(hadoop_conf_dir, "task-log4j.properties")
+
+hdfs_log_dir_prefix = config['configurations']['hadoop-env']['hdfs_log_dir_prefix']
+hbase_tmp_dir = config['configurations']['hbase-site']['hbase.tmp.dir']
+#db params
+server_db_name = config['hostLevelParams']['db_name']
+db_driver_filename = config['hostLevelParams']['db_driver_filename']
+oracle_driver_url = config['hostLevelParams']['oracle_jdbc_url']
+mysql_driver_url = config['hostLevelParams']['mysql_jdbc_url']
+ambari_server_resources = config['hostLevelParams']['jdk_location']
+oracle_driver_symlink_url = format("{ambari_server_resources}oracle-jdbc-driver.jar")
+mysql_driver_symlink_url = format("{ambari_server_resources}mysql-jdbc-driver.jar")
+
+ambari_db_rca_url = config['hostLevelParams']['ambari_db_rca_url'][0]
+ambari_db_rca_driver = config['hostLevelParams']['ambari_db_rca_driver'][0]
+ambari_db_rca_username = config['hostLevelParams']['ambari_db_rca_username'][0]
+ambari_db_rca_password = config['hostLevelParams']['ambari_db_rca_password'][0]
+
+if has_namenode and 'rca_enabled' in config['configurations']['hadoop-env']:
+  rca_enabled =  config['configurations']['hadoop-env']['rca_enabled']
+else:
+  rca_enabled = False
+rca_disabled_prefix = "###"
+if rca_enabled == True:
+  rca_prefix = ""
+else:
+  rca_prefix = rca_disabled_prefix
+
+#hadoop-env.sh
+java_home = config['hostLevelParams']['java_home']
+
+if str(config['hostLevelParams']['stack_version']).startswith('2.0') and System.get_instance().os_family != "suse":
+  # deprecated rhel jsvc_path
+  jsvc_path = "/usr/libexec/phd-utils"
+else:
+  jsvc_path = "/usr/lib/phd-utils"
+
+hadoop_heapsize = config['configurations']['hadoop-env']['hadoop_heapsize']
+namenode_heapsize = config['configurations']['hadoop-env']['namenode_heapsize']
+namenode_opt_newsize =  config['configurations']['hadoop-env']['namenode_opt_newsize']
+namenode_opt_maxnewsize =  config['configurations']['hadoop-env']['namenode_opt_maxnewsize']
+
+jtnode_opt_newsize = "200m"
+jtnode_opt_maxnewsize = "200m"
+jtnode_heapsize =  "1024m"
+ttnode_heapsize = "1024m"
+
+dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
+mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
+mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+yarn_log_dir_prefix = default("/configurations/yarn-env/yarn_log_dir_prefix","/var/log/hadoop-yarn")
+
+dfs_hosts = default('/configurations/hdfs-site/dfs.hosts', None)
+
+#log4j.properties
+if (('hdfs-log4j' in config['configurations']) and ('content' in config['configurations']['hdfs-log4j'])):
+  log4j_props = config['configurations']['hdfs-log4j']['content']
+  if (('yarn-log4j' in config['configurations']) and ('content' in config['configurations']['yarn-log4j'])):
+    log4j_props += config['configurations']['yarn-log4j']['content']
+else:
+  log4j_props = None
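
rca_prefix is intended to be substituted into log4j content so that the
RCA-related entries become comments when RCA is disabled; a tiny sketch with a
hypothetical log4j line:

rca_enabled = False
rca_disabled_prefix = "###"
rca_prefix = "" if rca_enabled else rca_disabled_prefix

# Hypothetical line; the real RCA entries live in the stack's log4j content.
line = "{rca_prefix}log4j.logger.org.example.rca=DEBUG".format(rca_prefix=rca_prefix)
print(line)  # prints ###log4j.logger.org.example.rca=DEBUG when RCA is disabled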