Posted to commits@ambari.apache.org by yu...@apache.org on 2015/11/07 01:47:29 UTC

[1/3] ambari git commit: Revert "AMBARI-13725: HAWQ and PXF to support 3 digit versioning instead of 4. (jaoki)" Reverted due to rat check failures. This reverts commit 5dac27bef4a89a049c7e77f49828a20370d0b518.

Repository: ambari
Updated Branches:
  refs/heads/trunk 6e8760d32 -> 810a32abb


http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py
deleted file mode 100644
index dd0031c..0000000
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/pxf.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from resource_management import Script
-
-from resource_management.libraries.resources.xml_config import XmlConfig
-from resource_management.core.exceptions import ComponentIsNotRunning  # needed by status()
-from resource_management.core.resources.accounts import User
-from resource_management.core.resources.system import Directory, File, Execute
-from resource_management.core.source import Template
-
-
-
-class Pxf(Script):
-  """
-  Contains the interface definitions for methods like install,
-  start, stop, status, etc. for the PXF service
-  """
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    self.__setup_user_group()
-    self.__generate_config_files()
-    # pxf-service init exits safely when it is already initialized
-    self.__execute_service_command("init")
-
-
-  def start(self, env):
-    self.configure(env)
-    self.__grant_permissions()
-    self.__execute_service_command("restart")
-
-
-  def stop(self, env):
-    self.__execute_service_command("stop")
-
-
-  def status(self, env):
-    try:
-      self.__execute_service_command("status")
-    except Exception:
-      raise ComponentIsNotRunning()
-
-
-  def __execute_service_command(self, command):
-    import params
-    Execute("service {0} {1}".format(params.pxf_service_name, command),
-              timeout=params.default_exec_timeout,
-              logoutput=True)
-
-
-  def __setup_user_group(self):
-    """
-    Creates PXF user with the required groups and bash as default shell
-    """
-    import params
-    User(params.pxf_user,
-         groups=[params.hdfs_superuser_group, params.user_group, params.tomcat_group],
-         shell="/bin/bash")
-
-
-  def __generate_config_files(self):
-    """
-    Generates pxf-env.sh file from jinja template and sets the classpath for HDP
-    """
-    import params
-    import shutil
-
-    hdp_stack = "HDP"
-
-    # Create file pxf-env.sh from jinja template
-    File("{0}/pxf-env.sh".format(params.pxf_conf_dir),
-         content = Template("pxf-env.j2"))
-
-    # Classpath is set for PHD by default. If stack is HDP, set classpath for HDP
-    if params.stack_name == hdp_stack:
-      shutil.copy2("{0}/pxf-privatehdp.classpath".format(params.pxf_conf_dir),
-                   "{0}/pxf-private.classpath".format(params.pxf_conf_dir))
-
-    XmlConfig("pxf-site.xml",
-              conf_dir=params.pxf_conf_dir,
-              configurations=params.config['configurations']['pxf-site'],
-              configuration_attributes=params.config['configuration_attributes']['pxf-site'])
-
-
-  def __grant_permissions(self):
-    """
-    Grants permission to pxf:pxf for PXF instance directory
-    """
-    import params
-    Directory(params.pxf_instance_dir,
-              owner=params.pxf_user,
-              group=params.pxf_group,
-              recursive=True)
-
-
-if __name__ == "__main__":
-  Pxf().execute()
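
For reference, the removed __execute_service_command helper simply shells out
to the system "service" wrapper. A minimal sketch of the command string it
renders, assuming a service name of "pxf-service" (the real value comes from
params.pxf_service_name):

    # Sketch only: mirrors the format string used in pxf.py above.
    def service_command(service_name, command):
        return "service {0} {1}".format(service_name, command)

    assert service_command("pxf-service", "init") == "service pxf-service init"
    assert service_command("pxf-service", "restart") == "service pxf-service restart"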

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/templates/pxf-env.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/templates/pxf-env.j2 b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/templates/pxf-env.j2
deleted file mode 100644
index 03f2420..0000000
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/templates/pxf-env.j2
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/sh
-
-#Licensed to the Apache Software Foundation (ASF) under one
-#or more contributor license agreements.  See the NOTICE file
-#distributed with this work for additional information
-#regarding copyright ownership.  The ASF licenses this file
-#to you under the Apache License, Version 2.0 (the
-#"License"); you may not use this file except in compliance
-#with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-#Unless required by applicable law or agreed to in writing, software
-#distributed under the License is distributed on an "AS IS" BASIS,
-#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#See the License for the specific language governing permissions and
-#limitations under the License.
-
-
-# THIS FILE SHOULD MATCH https://github.com/apache/incubator-hawq/blob/master/pxf/pxf-service/src/scripts/pxf-env.sh
-
-# Path to HDFS native libraries
-export LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:${LD_LIBRARY_PATH}
-
-# Path to JAVA
-export JAVA_HOME={{java_home}}
-
-# Path to Log directory
-export PXF_LOGDIR=/var/log/pxf
-export CATALINA_OUT=${PXF_LOGDIR}/catalina.out
-
-# Path to Run directory
-export PXF_RUNDIR=/var/run/pxf
-
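
The only templated value in this file is java_home. With a hypothetical
java_home of /usr/jdk64/jdk1.8.0_40, the rendered pxf-env.sh would contain:

    export JAVA_HOME=/usr/jdk64/jdk1.8.0_40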


[3/3] ambari git commit: Revert "AMBARI-13725: HAWQ and PXF to support 3 digit versioning instead of 4. (jaoki)" Reverted due to rat check failures. This reverts commit 5dac27bef4a89a049c7e77f49828a20370d0b518.

Posted by yu...@apache.org.
Revert "AMBARI-13725: HAWQ and PXF to support 3 digit versioning instead of 4. (jaoki)"
Reverted due to rat check failures.
This reverts commit 5dac27bef4a89a049c7e77f49828a20370d0b518.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/810a32ab
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/810a32ab
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/810a32ab

Branch: refs/heads/trunk
Commit: 810a32abbdae394fbe8f6f9263e70ec172ae8dfa
Parents: 6e8760d
Author: Yusaku Sako <yu...@hortonworks.com>
Authored: Fri Nov 6 16:47:11 2015 -0800
Committer: Yusaku Sako <yu...@hortonworks.com>
Committed: Fri Nov 6 16:47:11 2015 -0800

----------------------------------------------------------------------
 .../HAWQ/2.0.0.0/configuration/gpcheck-env.xml  |  86 ++++++
 .../2.0.0.0/configuration/hawq-limits-env.xml   |  46 +++
 .../HAWQ/2.0.0.0/configuration/hawq-site.xml    | 178 ++++++++++++
 .../2.0.0.0/configuration/hawq-sysctl-env.xml   | 247 ++++++++++++++++
 .../common-services/HAWQ/2.0.0.0/metainfo.xml   | 129 +++++++++
 .../HAWQ/2.0.0.0/package/scripts/common.py      | 283 +++++++++++++++++++
 .../HAWQ/2.0.0.0/package/scripts/constants.py   |  61 ++++
 .../HAWQ/2.0.0.0/package/scripts/hawqmaster.py  |  55 ++++
 .../HAWQ/2.0.0.0/package/scripts/hawqsegment.py | 102 +++++++
 .../HAWQ/2.0.0.0/package/scripts/hawqstandby.py |  58 ++++
 .../HAWQ/2.0.0.0/package/scripts/hawqstatus.py  |  64 +++++
 .../2.0.0.0/package/scripts/master_helper.py    | 194 +++++++++++++
 .../HAWQ/2.0.0.0/package/scripts/params.py      |  92 ++++++
 .../2.0.0.0/package/scripts/service_check.py    | 102 +++++++
 .../HAWQ/2.0.0.0/package/scripts/utils.py       | 108 +++++++
 .../2.0.0.0/package/templates/hawq-hosts.j2     |   5 +
 .../package/templates/hawq-profile.sh.j2        |   8 +
 .../HAWQ/2.0.0.0/package/templates/slaves.j2    |   3 +
 .../HAWQ/2.0.0/configuration/gpcheck-env.xml    |  86 ------
 .../2.0.0/configuration/hawq-limits-env.xml     |  46 ---
 .../HAWQ/2.0.0/configuration/hawq-site.xml      | 178 ------------
 .../2.0.0/configuration/hawq-sysctl-env.xml     | 247 ----------------
 .../common-services/HAWQ/2.0.0/metainfo.xml     | 129 ---------
 .../HAWQ/2.0.0/package/scripts/common.py        | 283 -------------------
 .../HAWQ/2.0.0/package/scripts/constants.py     |  61 ----
 .../HAWQ/2.0.0/package/scripts/hawqmaster.py    |  55 ----
 .../HAWQ/2.0.0/package/scripts/hawqsegment.py   | 102 -------
 .../HAWQ/2.0.0/package/scripts/hawqstandby.py   |  58 ----
 .../HAWQ/2.0.0/package/scripts/hawqstatus.py    |  64 -----
 .../HAWQ/2.0.0/package/scripts/master_helper.py | 194 -------------
 .../HAWQ/2.0.0/package/scripts/params.py        |  92 ------
 .../HAWQ/2.0.0/package/scripts/service_check.py | 102 -------
 .../HAWQ/2.0.0/package/scripts/utils.py         | 108 -------
 .../HAWQ/2.0.0/package/templates/hawq-hosts.j2  |   5 -
 .../2.0.0/package/templates/hawq-profile.sh.j2  |   8 -
 .../HAWQ/2.0.0/package/templates/slaves.j2      |   3 -
 .../PXF/3.0.0.0/configuration/pxf-site.xml      |  19 ++
 .../common-services/PXF/3.0.0.0/metainfo.xml    |  71 +++++
 .../PXF/3.0.0.0/package/scripts/params.py       |  42 +++
 .../PXF/3.0.0.0/package/scripts/pxf.py          | 120 ++++++++
 .../PXF/3.0.0.0/package/templates/pxf-env.j2    |  34 +++
 .../PXF/3.0.0/configuration/pxf-site.xml        |  19 --
 .../common-services/PXF/3.0.0/metainfo.xml      |  71 -----
 .../PXF/3.0.0/package/scripts/params.py         |  42 ---
 .../PXF/3.0.0/package/scripts/pxf.py            | 120 --------
 .../PXF/3.0.0/package/templates/pxf-env.j2      |  34 ---
 46 files changed, 2107 insertions(+), 2107 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/gpcheck-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/gpcheck-env.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/gpcheck-env.xml
new file mode 100755
index 0000000..a61a34f
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/gpcheck-env.xml
@@ -0,0 +1,86 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration>
+  <!-- gpcheck.cnf -->
+    <property>
+      <name>content</name>
+      <display-name>Content</display-name>
+      <description>Contents of the configuration file /usr/local/hawq/etc/gpcheck.cnf. This file is used by the 'hawq check' command, which can be run manually by the gpadmin user on the HAWQ master host. This command validates the system parameters and HDFS parameters mentioned in this file to ensure optimal HAWQ operation.</description>
+        <value>
+[global]
+configfile_version = 4
+
+[linux.mount]
+mount.points = /
+
+[linux.sysctl]
+sysctl.kernel.shmmax = 500000000
+sysctl.kernel.shmmni = 4096
+sysctl.kernel.shmall = 4000000000
+sysctl.kernel.sem = 250 512000 100 2048
+sysctl.kernel.sysrq = 1
+sysctl.kernel.core_uses_pid = 1
+sysctl.kernel.msgmnb = 65536
+sysctl.kernel.msgmax = 65536
+sysctl.kernel.msgmni = 2048
+sysctl.net.ipv4.tcp_syncookies = 0
+sysctl.net.ipv4.ip_forward = 0
+sysctl.net.ipv4.conf.default.accept_source_route = 0
+sysctl.net.ipv4.tcp_tw_recycle = 1
+sysctl.net.ipv4.tcp_max_syn_backlog = 200000
+sysctl.net.ipv4.conf.all.arp_filter = 1
+sysctl.net.ipv4.ip_local_port_range = 1281 65535
+sysctl.net.core.netdev_max_backlog = 200000
+sysctl.vm.overcommit_memory = 2
+sysctl.fs.nr_open = 3000000
+sysctl.kernel.threads-max = 798720
+sysctl.kernel.pid_max = 798720
+# increase network
+sysctl.net.core.rmem_max = 2097152
+sysctl.net.core.wmem_max = 2097152
+
+[linux.limits]
+soft.nofile = 2900000
+hard.nofile = 2900000
+soft.nproc  = 131072
+hard.nproc  = 131072
+
+[linux.diskusage]
+diskusage.monitor.mounts = /
+diskusage.monitor.usagemax = 90%
+
+[hdfs]
+dfs.mem.namenode.heap = 40960
+dfs.mem.datanode.heap = 6144
+# in hdfs-site.xml
+dfs.support.append = true
+dfs.client.enable.read.from.local = true
+dfs.block.local-path-access.user = gpadmin
+dfs.datanode.max.transfer.threads = 40960
+dfs.client.socket-timeout = 300000000
+dfs.datanode.socket.write.timeout = 7200000
+dfs.namenode.handler.count = 60
+ipc.server.handler.queue.size = 3300
+dfs.datanode.handler.count = 60
+ipc.client.connection.maxidletime = 3600000
+dfs.namenode.accesstime.precision = -1
+    </value>
+  </property>
+</configuration>
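
As the description above notes, this file is consumed by the 'hawq check'
utility rather than by Ambari itself. A sketch of a manual invocation on the
HAWQ master, using the host file path /tmp/hawq_hosts defined in constants.py
further down in this diff (the -f flag follows HAWQ's documented usage, but
exact flags may vary by release):

    su - gpadmin
    hawq check -f /tmp/hawq_hosts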

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-limits-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-limits-env.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-limits-env.xml
new file mode 100644
index 0000000..d8917e6
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-limits-env.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+    <property>
+        <display-name>soft nofile</display-name>
+        <name>soft_nofile</name>
+        <value>2900000</value>
+        <description>Soft limit for the number of file handles or open files for user gpadmin. Value is set in file /etc/security/limits.d/gpadmin.conf</description>
+    </property>
+
+    <property>
+        <display-name>hard nofile</display-name>
+        <name>hard_nofile</name>
+        <value>2900000</value>
+        <description>Hard limit for the number of file handles or open files for user gpadmin. Value is set in /etc/security/limits.d/gpadmin.conf</description>
+    </property>
+
+    <property>
+        <display-name>soft nproc</display-name>
+        <name>soft_nproc</name>
+        <value>131072</value>
+        <description>Soft limit for the maximum number of processes for user gpadmin. Value is set in /etc/security/limits.d/gpadmin.conf</description>
+    </property>
+
+    <property>
+        <display-name>hard nproc</display-name>
+        <name>hard_nproc</name>
+        <value>131072</value>
+        <description>Hard limit for the maximum number of processes for user gpadmin. Value is set in /etc/security/limits.d/gpadmin.conf</description>
+    </property>
+</configuration>
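
These four values are written out by common.py (further down in this diff) to
/etc/security/limits.d/gpadmin.conf, with the underscore in each key replaced
by a space. With the defaults above, the generated file would read (property
order may vary, since it follows dict iteration order):

    #### HAWQ Limits Parameters  ###########
    gpadmin soft nofile 2900000
    gpadmin hard nofile 2900000
    gpadmin soft nproc 131072
    gpadmin hard nproc 131072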

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-site.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-site.xml
new file mode 100644
index 0000000..41b10dc
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-site.xml
@@ -0,0 +1,178 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>hawq_master_address_host</name>
+    <display-name>HAWQ Master</display-name>
+    <value>localhost</value>
+    <description>The host name of HAWQ master.</description>
+  </property>
+
+  <property>
+    <name>hawq_standby_address_host</name>
+    <display-name>HAWQ Standby Master</display-name>
+    <value>localhost</value>
+    <description>The host name of HAWQ standby.</description>
+  </property>
+
+  <property>
+    <name>hawq_master_address_port</name>
+    <display-name>HAWQ Master Port</display-name>
+    <value>5432</value>
+    <description>The port of HAWQ master.</description>
+  </property>
+
+  <property>
+    <name>hawq_segment_address_port</name>
+    <display-name>HAWQ Segment Port</display-name>
+    <value>40000</value>
+    <description>The port of HAWQ segment.</description>
+  </property>
+
+  <property>
+    <name>hawq_dfs_url</name>
+    <display-name>HAWQ DFS URL</display-name>
+    <value>localhost:8020/hawq_default</value>
+    <description>URL for accessing HDFS.</description>
+  </property>
+
+  <property>
+    <name>hawq_master_directory</name>
+    <display-name>HAWQ Master Directory</display-name>
+    <value>/data/hawq/master</value>
+    <description>The directory of HAWQ master.</description>
+  </property>
+
+  <property>
+    <name>hawq_segment_directory</name>
+    <display-name>HAWQ Segment Directory</display-name>
+    <value>/data/hawq/segment</value>
+    <description>The directory of HAWQ segment.</description>
+  </property> 
+
+  <property>
+    <name>hawq_master_temp_directory</name>
+    <display-name>HAWQ Master Temp Directory</display-name>
+    <value>/tmp</value>
+    <description>The temporary directory reserved for HAWQ master.</description>
+  </property>
+
+  <property>
+    <name>hawq_segment_temp_directory</name>
+    <display-name>HAWQ Segment Temp Directory</display-name>
+    <value>/tmp</value>
+    <description>The temporary directory reserved for HAWQ segment.</description>
+  </property>
+
+  <!-- HAWQ resource manager parameters -->
+  <property>
+    <name>hawq_global_rm_type</name>
+    <value>none</value>
+    <description>The resource manager type to start for allocating resources.
+      'none' means the HAWQ resource manager exclusively uses the whole
+      cluster; 'yarn' means the HAWQ resource manager contacts the YARN
+      resource manager to negotiate resources.
+    </description>
+  </property>
+
+  <property>
+    <name>hawq_rm_memory_limit_perseg</name>
+    <value>64GB</value>
+    <description>The limit of memory usage in a HAWQ segment when 
+      hawq_global_rm_type is set to 'none'.
+    </description>
+  </property>
+
+  <property>
+    <name>hawq_rm_nvcore_limit_perseg</name>
+    <value>16</value>
+    <description>The limit of virtual core usage in a HAWQ segment when 
+      hawq_global_rm_type is set to 'none'.
+    </description>
+  </property>
+
+  <property>
+    <name>hawq_rm_yarn_address</name>
+    <value>localhost:8032</value>
+    <description>The address of YARN resource manager server.</description>
+  </property>
+
+  <property>
+    <name>hawq_rm_yarn_scheduler_address</name>
+    <value>localhost:8030</value>
+    <description>The address of YARN scheduler server.</description>
+  </property>
+
+  <property>
+    <name>hawq_rm_yarn_queue_name</name>
+    <value>default</value>
+    <description>The YARN queue name to register HAWQ resource manager.</description>
+  </property>
+
+  <property>
+    <name>hawq_rm_yarn_app_name</name>
+    <value>hawq</value>
+    <description>The application name to register HAWQ resource manager in YARN.</description>
+  </property>
+
+  <property>
+    <name>hawq_re_cpu_enable</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>hawq_re_cgroup_mount_point</name>
+    <value>/sys/fs/cgroup</value>
+  </property>
+
+  <property>
+    <name>hawq_re_cgroup_hierarchy_name</name>
+    <value>hadoop-yarn</value>
+  </property>
+
+  <property>
+    <name>hawq_re_cleanup_period</name>
+    <value>180</value>
+  </property>
+
+  <property>
+    <name>hawq_re_cpu_weight</name>
+    <value>1024.0</value>
+  </property>
+
+  <property>
+    <name>hawq_re_vcore_pcore_ratio</name>
+    <value>1.0</value>
+  </property>
+
+  <property>
+    <name>hawq_resourcemanager_master_address_domainsocket_port</name>
+    <value>5436</value>
+  </property>
+
+  <property>
+    <name>hawq_rm_master_port</name>
+    <value>5437</value>
+  </property>
+
+  <property>
+    <name>hawq_rm_segment_port</name>
+    <value>5438</value>
+  </property>
+
+</configuration>
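
The "localhost" values above are placeholders: at configure time,
__substitute_hostnames_in_hawq_site in common.py (further down in this diff)
rewrites them with the actual component hosts. For example, with a NameNode on
a hypothetical host nn1.example.com, hawq_dfs_url would become
nn1.example.com:8020/hawq_default.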

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-sysctl-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-sysctl-env.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-sysctl-env.xml
new file mode 100644
index 0000000..32ae5a5
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/configuration/hawq-sysctl-env.xml
@@ -0,0 +1,247 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration>
+  <property>
+    <name>kernel.shmmax</name>
+    <value>500000000</value>
+    <description>Maximum size in bytes of a single shared memory segment that a Linux process can allocate in its
+      virtual address space</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kernel.shmmni</name>
+    <value>4096</value>
+    <description>System wide maximum number of shared memory segments</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kernel.shmall</name>
+    <value>4000000000</value>
+    <description>Total amount of shared memory pages that can be used system wide</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kernel.sem</name>
+    <value>250 512000 100 2048</value>
+    <description>Parameter to define semaphore related values</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kernel.sysrq</name>
+    <value>1</value>
+    <description>Enable(1)/Disable(0) functions of sysrq</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kernel.core_uses_pid</name>
+    <value>1</value>
+    <description>Enable appending process id to the name of core dump file. Ex: core.PID</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kernel.msgmnb</name>
+    <value>65536</value>
+    <description>Default maximum size in bytes of a message queue</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kernel.msgmax</name>
+    <value>65536</value>
+    <description>Maximum size in bytes of a single message</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kernel.msgmni</name>
+    <value>2048</value>
+    <description>Number of message queue identifiers</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>net.ipv4.tcp_syncookies</name>
+    <value>0</value>
+    <description>Enable(1)/Disable(0) SYN cookie protection</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>net.ipv4.ip_forward</name>
+    <value>0</value>
+    <description>Enable(1)/Disable(0) IP forwarding</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>net.ipv4.conf.default.accept_source_route</name>
+    <value>0</value>
+    <description>Enable(1)/Disable(0) IP source routing</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>net.ipv4.tcp_tw_recycle</name>
+    <value>1</value>
+    <description>Enable(1)/Disable(0) fast recycling of TIME_WAIT sockets</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>net.ipv4.tcp_max_syn_backlog</name>
+    <value>200000</value>
+    <description>Number of outstanding syn requests allowed</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>net.ipv4.conf.all.arp_filter</name>
+    <value>1</value>
+    <description>Enable(1)/Disable(0) route lookup to determine the interface through which to send the reply</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>net.ipv4.ip_local_port_range</name>
+    <value>1281 65535</value>
+    <description>Local port range that is used by TCP and UDP traffic to choose the local port</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>net.core.netdev_max_backlog</name>
+    <value>200000</value>
+    <description>Maximum number of packets allowed to queue when a particular interface receives packets faster than the kernel can process them</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>fs.nr_open</name>
+    <value>3000000</value>
+    <description>Maximum number of file-handles a process can allocate</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kernel.threads-max</name>
+    <value>798720</value>
+    <description>System-wide maximum number of threads to be used by the kernel at one time</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>kernel.pid_max</name>
+    <value>798720</value>
+    <description>Maximum number of unique process identifiers</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>net.core.rmem_max</name>
+    <value>2097152</value>
+    <description>Maximum receive socket buffer size in bytes</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>net.core.wmem_max</name>
+    <value>2097152</value>
+    <description>Maximum send socket buffer size in bytes</description>
+    <value-attributes>
+      <overridable>false</overridable>
+      <editable-only-at-install>true</editable-only-at-install>
+    </value-attributes>
+  </property>
+
+  <property>
+    <name>vm.overcommit_memory</name>
+    <value>1</value>
+    <description>Defines the conditions that determine whether a large memory request is accepted or denied. There are
+      three possible values for this parameter: 0, 1, or 2. For production environments, a value of 2 is recommended</description>
+  </property>
+
+</configuration>
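
On non-SUSE systems, common.py (further down in this diff) serializes these
properties into /etc/sysctl.d/hawq_sysctl.conf as "key = value" pairs and
reloads them with "sysctl -e -p". A fragment of the generated file (order may
vary, since it follows dict iteration order):

    ### HAWQ System Parameters ###########
    kernel.shmmax = 500000000
    kernel.shmmni = 4096
    kernel.shmall = 4000000000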

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/metainfo.xml
new file mode 100644
index 0000000..2ceff96
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/metainfo.xml
@@ -0,0 +1,129 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HAWQ</name>
+      <displayName>HAWQ</displayName>
+      <comment>Apache HAWQ - Apache Hadoop Native SQL</comment>
+      <version>2.0</version>
+      <components>
+
+        <component>
+          <name>HAWQMASTER</name>
+          <displayName>HAWQ Master</displayName>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <commandScript>
+            <script>scripts/hawqmaster.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>1200</timeout>
+          </commandScript>
+          <dependencies>
+            <dependency>
+              <name>HDFS/NAMENODE</name>
+              <scope>cluster</scope>
+              <auto-deploy>
+                <enabled>false</enabled>
+              </auto-deploy>
+            </dependency>
+            <dependency>
+              <name>HDFS/HDFS_CLIENT</name>
+              <scope>host</scope>
+              <auto-deploy>
+                  <enabled>true</enabled>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+        </component>
+
+      <component>
+          <name>HAWQSTANDBY</name>
+          <displayName>HAWQ Standby Master</displayName>
+          <category>MASTER</category>
+          <cardinality>0-1</cardinality>
+          <commandScript>
+              <script>scripts/hawqstandby.py</script>
+              <scriptType>PYTHON</scriptType>
+              <timeout>600</timeout>
+          </commandScript>
+          <dependencies>
+            <dependency>
+                <name>HDFS/HDFS_CLIENT</name>
+                <scope>host</scope>
+                <auto-deploy>
+                    <enabled>true</enabled>
+                </auto-deploy>
+            </dependency>
+          </dependencies>
+      </component>
+
+        <component>
+          <name>HAWQSEGMENT</name>
+          <displayName>HAWQ Segment</displayName>
+          <category>SLAVE</category>
+          <cardinality>1+</cardinality>
+          <commandScript>
+            <script>scripts/hawqsegment.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+           <dependencies>
+            <dependency>
+              <name>HDFS/DATANODE</name>
+              <scope>host</scope>
+              <auto-deploy>
+                <enabled>false</enabled>
+                <co-locate>HDFS/DATANODE</co-locate>
+              </auto-deploy>
+            </dependency>
+          </dependencies>
+        </component>
+      </components> 
+      <requiredServices>
+          <service>HDFS</service>
+      </requiredServices>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+           <packages>
+            <package>
+              <name>hawq</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <configuration-dependencies>
+        <config-type>hawq-site</config-type>
+        <config-type>gpcheck-env</config-type>
+      </configuration-dependencies>
+    </service>
+
+  </services>
+</metainfo>
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/common.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/common.py
new file mode 100644
index 0000000..31ef2fa
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/common.py
@@ -0,0 +1,283 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import re
+import os
+import time
+import crypt
+import filecmp
+from resource_management.libraries.resources.xml_config import XmlConfig
+from resource_management.core.resources.system import Execute, Directory, File
+from resource_management.core.logger import Logger
+from resource_management.core.system import System
+from resource_management.core.exceptions import Fail
+from resource_management.core.resources.accounts import Group, User
+from resource_management.core.source import Template
+import xml.etree.ElementTree as ET
+
+import utils
+import constants
+
+
+def update_bashrc(source_file, target_file):
+  """
+  Updates the hawq_user's .bashrc file with HAWQ env variables like
+  MASTER_DATA_DIRECTORY, PGHOST, PGPORT and PGUSER, and sources the
+  greenplum_path file.
+  """
+  append_src_cmd = "echo 'source {0}' >> {1}".format(source_file, target_file)
+  src_cmd_exists = "grep 'source {0}' {1}".format(source_file, target_file)
+  Execute(append_src_cmd, user=constants.hawq_user, timeout=constants.default_exec_timeout, not_if=src_cmd_exists)
+
+
+def setup_user():
+  """
+  Creates HAWQ user home directory and sets up the correct ownership.
+  """
+  __create_hawq_user()
+  __set_home_dir_ownership()
+
+
+def __create_hawq_user():
+  """
+  Creates HAWQ user with default password and group.
+  """
+  import params
+  Group(constants.hawq_group, ignore_failures=True)
+
+  User(constants.hawq_user,
+       gid=constants.hawq_group,
+       password=crypt.crypt(constants.hawq_password, "salt"),
+       groups=[constants.hawq_group, params.user_group],
+       ignore_failures=True)
+
+
+def __set_home_dir_ownership():
+  """
+  Updates the HAWQ user home directory to be owned by gpadmin:gpadmin.
+  """
+  command = "chown -R {0}:{1} {2}".format(constants.hawq_user, constants.hawq_group, constants.hawq_home_dir)
+  Execute(command, timeout=constants.default_exec_timeout)
+
+
+def setup_common_configurations():
+  """
+  Sets up the config files common to master, standby and segment nodes.
+  """
+  import params
+
+  substituted_conf_dict = __substitute_hostnames_in_hawq_site()
+  XmlConfig("hawq-site.xml",
+            conf_dir=constants.hawq_config_dir,
+            configurations=substituted_conf_dict,
+            configuration_attributes=params.config['configuration_attributes']['hawq-site'],
+            owner=constants.hawq_user,
+            group=constants.hawq_group,
+            mode=0644)
+  __set_osparams()
+
+
+def __substitute_hostnames_in_hawq_site():
+  """
+  Temporary function to replace localhost with actual HAWQ component hostnames.
+  This function will remain in place until the entire HAWQ plugin code, along
+  with the UI changes, is submitted to trunk.
+  """
+  import params
+
+  LOCALHOST = "localhost"
+  
+  # in case there is no standby
+  hawqstandby_host_desired_value = params.hawqstandby_host if params.hawqstandby_host is not None else 'none' 
+  
+  substituted_hawq_site = params.hawq_site.copy()
+  hawq_site_property_map = {"hawq_master_address_host": params.hawqmaster_host,
+                            "hawq_standby_address_host": hawqstandby_host_desired_value,
+                            "hawq_rm_yarn_address": params.rm_host,
+                            "hawq_rm_yarn_scheduler_address": params.rm_host,
+                            "hawq_dfs_url": params.namenode_host
+                            }
+
+  for property, desired_value in hawq_site_property_map.iteritems():
+    if desired_value is not None:
+      # Replace localhost with required component hostname
+      substituted_hawq_site[property] = re.sub(LOCALHOST, desired_value, substituted_hawq_site[property])
+
+  return substituted_hawq_site
+
+
+def __set_osparams():
+  """
+  Updates parameters in sysctl.conf and limits.conf required by HAWQ.
+  """
+  # Create a temp scratchpad directory
+  utils.create_dir_as_hawq_user(constants.hawq_tmp_dir)
+
+  # SUSE doesn't support loading values from files in /etc/sysctl.d,
+  # so we have to edit /etc/sysctl.conf directly
+  if System.get_instance().os_family == "suse":
+    # Update /etc/sysctl.conf
+    __update_sysctl_file_suse()
+  else:
+    # Update /etc/sysctl.d/hawq.conf
+    __update_sysctl_file()
+
+  __update_limits_file()
+
+
+def __update_limits_file():
+  """
+  Updates /etc/security/limits.d/hawq.conf file with the HAWQ parameters.
+  """
+  import params
+  # Ensure limits directory exists
+  Directory(constants.limits_conf_dir, recursive=True, owner=constants.root_user, group=constants.root_user)
+
+  # Generate limits for hawq user
+  limits_file_content = "#### HAWQ Limits Parameters  ###########\n"
+  for key, value in params.hawq_limits.iteritems():
+    if not __valid_input(value):
+      raise Exception("Value {0} for parameter {1} contains non-numeric characters which are not allowed (except whitespace), please fix the value and retry".format(value, key))
+    """
+    Content of the file to be written should be of the format
+    gpadmin soft nofile 290000
+    gpadmin hard nofile 290000
+    key used in the configuration is of the format soft_nofile, thus strip '_' and replace with 'space'
+    """
+    limits_file_content += "{0} {1} {2}\n".format(constants.hawq_user, re.sub("_", " ", key), value.strip())
+  File('{0}/{1}.conf'.format(constants.limits_conf_dir, constants.hawq_user), content=limits_file_content,
+       owner=constants.hawq_user, group=constants.hawq_group)
+
+
+def __valid_input(value):
+  """
+  Validate that the value contains only digits and whitespace; returns a truthy match object if so, else None
+  """
+  return re.search("^ *[0-9][0-9 ]*$", value)
+
+
+def __convert_sysctl_dict_to_text():
+  """
+  Convert sysctl configuration dict to text with each property value pair separated on new line
+  """
+  import params
+  sysctl_file_content = "### HAWQ System Parameters ###########\n"
+  for key, value in params.hawq_sysctl.iteritems():
+    if not __valid_input(value):
+      raise Exception("Value {0} for parameter {1} contains non-numeric characters which are not allowed (except whitespace), please fix the value and retry".format(value, key))
+    sysctl_file_content += "{0} = {1}\n".format(key, value)
+  return sysctl_file_content
+
+
+def __update_sysctl_file():
+  """
+  Updates /etc/sysctl.d/hawq_sysctl.conf file with the HAWQ parameters on CentOS/RHEL.
+  """
+  # Ensure sys ctl sub-directory exists
+  Directory(constants.sysctl_conf_dir, recursive=True, owner=constants.root_user, group=constants.root_user)
+
+  # Generate temporary file with kernel parameters needed by hawq
+  File(constants.hawq_sysctl_tmp_file, content=__convert_sysctl_dict_to_text(), owner=constants.hawq_user,
+       group=constants.hawq_group)
+
+  is_changed = True
+  if os.path.exists(constants.hawq_sysctl_tmp_file) and os.path.exists(constants.hawq_sysctl_file):
+    is_changed = not filecmp.cmp(constants.hawq_sysctl_file, constants.hawq_sysctl_tmp_file)
+
+  if is_changed:
+    # Generate file with kernel parameters needed by hawq, only if something
+    # has been changed by user
+    Execute("cp -p {0} {1}".format(constants.hawq_sysctl_tmp_file, constants.hawq_sysctl_file))
+
+    # Reload kernel sysctl parameters from hawq file.
+    Execute("sysctl -e -p {0}".format(constants.hawq_sysctl_file), timeout=constants.default_exec_timeout)
+
+  # Wipe out temp file
+  File(constants.hawq_sysctl_tmp_file, action='delete')
+
+
+def __update_sysctl_file_suse():
+  """
+  Updates /etc/sysctl.conf file with the HAWQ parameters on SUSE.
+  """
+  # Backup file
+  backup_file_name = constants.sysctl_backup_file.format(str(int(time.time())))
+  try:
+    # Generate file with kernel parameters needed by hawq to temp file
+    File(constants.hawq_sysctl_tmp_file, content=__convert_sysctl_dict_to_text(), owner=constants.hawq_user,
+        group=constants.hawq_group)
+
+    sysctl_file_dict = utils.read_file_to_dict(constants.sysctl_suse_file)
+    sysctl_file_dict_original = sysctl_file_dict.copy()
+    hawq_sysctl_dict = utils.read_file_to_dict(constants.hawq_sysctl_tmp_file)
+
+    # Merge common system file with hawq specific file
+    sysctl_file_dict.update(hawq_sysctl_dict)
+
+    if sysctl_file_dict_original != sysctl_file_dict:
+      # Backup file
+      Execute("cp {0} {1}".format(constants.sysctl_suse_file, backup_file_name), timeout=constants.default_exec_timeout)
+      # Write merged properties to file
+      utils.write_dict_to_file(sysctl_file_dict, constants.sysctl_suse_file)
+      # Reload kernel sysctl parameters from /etc/sysctl.conf
+      Execute("sysctl -e -p", timeout=constants.default_exec_timeout)
+
+  except Exception as e:
+    Logger.error("Error occurred while updating sysctl.conf file, reverting the contents" + str(e))
+    Execute("cp {0} {1}".format(constants.sysctl_suse_file, constants.hawq_sysctl_tmp_file))
+    Execute("mv {0} {1}".format(backup_file_name, constants.sysctl_suse_file), timeout=constants.default_exec_timeout)
+    Logger.error("Please execute `sysctl -e -p` on the command line manually to reload the contents of file {0}".format(
+      constants.hawq_sysctl_tmp_file))
+    raise Fail("Failed to update sysctl.conf file ")
+
+
+def get_local_hawq_site_property(property_name):
+  """
+  Fetches the value of the property specified, from the local hawq-site.xml.
+  """
+  hawq_site_path = None
+  try:
+    hawq_site_path = os.path.join(constants.hawq_config_dir, "hawq-site.xml")
+    hawq_site_root = ET.parse(hawq_site_path).getroot()
+    for property in hawq_site_root.findall("property"):
+      for item in property:
+        if item.tag == 'name':
+          current_property_name = item.text.strip() if item.text else item.text
+        elif item.tag == 'value':
+          current_property_value = item.text.strip() if item.text else item.text
+      if property_name == current_property_name:
+          return current_property_value
+    raise KeyError(property_name)  # property not found; converted to Fail below
+  except Exception:
+    raise Fail("Unable to read property {0} from local {1}".format(property_name, hawq_site_path))
+
+def validate_configuration():
+  """
+  Validates if YARN is present in the configuration when the user specifies YARN as HAWQ's resource manager.
+  """
+  import params
+
+  # At this point, hawq should be included.
+  if 'hawq-site' not in params.config['configurations']:
+    raise Fail("Configurations does not contain hawq-site. Please include HAWQ")
+
+  # If HAWQ is set to use YARN and YARN is not configured, error.
+  rm_type = params.config["configurations"]["hawq-site"].get("hawq_global_rm_type")
+  if rm_type == "yarn" and "yarn-site" not in params.config["configurations"]:
+    raise Fail("HAWQ is set to use YARN but YARN is not deployed. " + 
+               "hawq_global_rm_type property in hawq-site is set to 'yarn' but YARN is not configured. " + 
+               "Please deploy YARN before starting HAWQ or change the value of hawq_global_rm_type property to 'none'")

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/constants.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/constants.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/constants.py
new file mode 100644
index 0000000..b56fd59
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/constants.py
@@ -0,0 +1,61 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import os
+
+MASTER = "master"
+STANDBY = "standby"
+SEGMENT = "segment"
+START = "start"
+INIT = "init"
+STOP = "stop"
+
+# Users
+root_user = "root"
+hawq_user = "gpadmin"
+hawq_group = hawq_user
+hawq_password = hawq_user
+
+# Directories
+hawq_home_dir = "/usr/local/hawq/"
+hawq_config_dir = "/usr/local/hawq/etc/"
+hawq_pid_dir = "/var/run/hawq/"
+hawq_tmp_dir = '/tmp/hawq/'
+hawq_user_home_dir = os.path.expanduser("~{0}".format(hawq_user))
+limits_conf_dir = "/etc/security/limits.d"
+sysctl_conf_dir = "/etc/sysctl.d"
+
+# Files
+hawq_slaves_file = os.path.join(hawq_config_dir, "slaves")
+hawq_user_bashrc_file = os.path.join(hawq_user_home_dir, ".bashrc")
+hawq_greenplum_path_file = os.path.join(hawq_home_dir, "greenplum_path.sh")
+hawq_hosts_file = "/tmp/hawq_hosts"
+hawq_check_file = os.path.join(hawq_config_dir, "gpcheck.cnf")
+sysctl_suse_file = "/etc/sysctl.conf"
+sysctl_backup_file = "/etc/sysctl.conf.backup.{0}"
+hawq_sysctl_filename = "hawq_sysctl.conf"
+hawq_sysctl_tmp_file = os.path.join(hawq_tmp_dir, hawq_sysctl_filename)
+hawq_sysctl_file = os.path.join(sysctl_conf_dir, hawq_sysctl_filename)
+postmaster_opts_filename = "postmaster.opts"
+postmaster_pid_filename = "postmaster.pid"
+
+# Smoke check table
+smoke_check_table_name = "ambari_hawq_smoke_test"
+
+# Timeouts
+default_exec_timeout = 600
+hawq_operation_exec_timeout = 900

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqmaster.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqmaster.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqmaster.py
new file mode 100644
index 0000000..4443264
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqmaster.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from resource_management import Script
+from resource_management.libraries.functions.check_process_status import check_process_status
+
+import master_helper
+import common
+import constants
+
+class HawqMaster(Script):
+  """
+  Contains the interface definitions for methods like install, 
+  start, stop, status, etc. for the HAWQ Master
+  """
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    env.set_params(constants)
+    master_helper.configure_master()
+
+  def start(self, env):
+    self.configure(env)
+    common.validate_configuration()
+    master_helper.start_master()
+
+  def stop(self, env):
+    master_helper.stop_master()
+
+  def status(self, env):
+    from hawqstatus import get_pid_file
+    check_process_status(get_pid_file())
+
+if __name__ == "__main__":
+  HawqMaster().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqsegment.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqsegment.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqsegment.py
new file mode 100644
index 0000000..b4be502
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqsegment.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import os
+from resource_management import Script
+from resource_management.core.resources.system import Execute
+from resource_management.libraries.functions.check_process_status import check_process_status
+
+import utils
+import common
+import constants
+
+class HawqSegment(Script):
+  """
+  Contains the interface definitions for methods like install, 
+  start, stop, status, etc. for the HAWQ Segment
+  """
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+
+    env.set_params(params)
+    env.set_params(constants)
+    common.setup_user()
+    common.setup_common_configurations()
+    common.update_bashrc(constants.hawq_greenplum_path_file, constants.hawq_user_bashrc_file)
+
+
+  def __start_segment(self):
+    import params
+    return utils.exec_hawq_operation(
+          constants.START, 
+          "{0} -a".format(constants.SEGMENT), 
+          not_if=utils.chk_postgres_status_cmd(params.hawq_segment_address_port))
+
+  def start(self, env):
+    self.configure(env)
+    common.validate_configuration()
+
+    if self.__is_segment_initialized():
+      self.__start_segment()
+      return
+
+    # Initialization also starts the process.
+    self.__init_segment()
+
+
+  def stop(self, env):
+    import params
+
+    utils.exec_hawq_operation(constants.STOP, "{0} -a".format(constants.SEGMENT), only_if=utils.chk_postgres_status_cmd(
+                                params.hawq_segment_address_port))
+
+
+  def status(self, env):
+    from hawqstatus import get_pid_file
+    check_process_status(get_pid_file())
+
+
+  @staticmethod
+  def __init_segment():
+    import params
+
+    # Create segment directories
+    utils.create_dir_as_hawq_user(params.hawq_segment_dir)
+    utils.create_dir_as_hawq_user(params.hawq_segment_temp_dir.split(','))
+
+    Execute("chown {0}:{1} {2}".format(constants.hawq_user, constants.hawq_group, os.path.dirname(params.hawq_segment_dir)),
+            user=constants.root_user, timeout=constants.default_exec_timeout)
+
+    # Initialize hawq segment
+    utils.exec_hawq_operation(constants.INIT, "{0} -a -v".format(constants.SEGMENT))
+
+  def __is_segment_initialized(self):
+    """
+    Check whether the HAWQ Segment is initialized
+    """
+    import params
+    return os.path.exists(os.path.join(params.hawq_segment_dir, constants.postmaster_opts_filename))
+
+
+if __name__ == "__main__":
+  HawqSegment().execute()
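
The not_if and only_if guards used with exec_hawq_operation are shell commands whose exit code decides whether the hawq start/stop actually runs: start is skipped when postgres already answers on the segment port, and stop runs only when it does. A simplified sketch of those semantics (run_guarded is a hypothetical helper, not the Ambari API):

    import subprocess

    def run_guarded(cmd, not_if=None, only_if=None):
        if not_if and subprocess.call(not_if, shell=True) == 0:
            return  # guard succeeded: work already done, skip cmd
        if only_if and subprocess.call(only_if, shell=True) != 0:
            return  # precondition not met, skip cmd
        subprocess.check_call(cmd, shell=True)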

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstandby.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstandby.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstandby.py
new file mode 100644
index 0000000..d8254f9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstandby.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+from resource_management import Script
+from resource_management.libraries.functions.check_process_status import check_process_status
+
+import master_helper
+import common
+import constants
+
+class HawqStandby(Script):
+  """
+  Contains the interface definitions for methods like install, 
+  start, stop, status, etc. for the HAWQ Standby Master
+  """
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    env.set_params(constants)
+    master_helper.configure_master()
+
+  def start(self, env):
+    self.configure(env)
+    common.validate_configuration()
+    master_helper.start_master()
+
+  def stop(self, env):
+    master_helper.stop_master()
+
+  def status(self, env):
+    from hawqstatus import get_pid_file
+    check_process_status(get_pid_file())
+
+  def activatestandby(self, env):
+    pass
+
+if __name__ == "__main__":
+  HawqStandby().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstatus.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstatus.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstatus.py
new file mode 100644
index 0000000..26dfdd0
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/hawqstatus.py
@@ -0,0 +1,64 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import os
+
+from resource_management import Script
+from resource_management.core.resources.system import File
+from resource_management.core.exceptions import Fail
+
+import utils
+import common
+import constants
+
+
+def get_pid_file():
+  """
+  Fetches the pid file, which will be used to get the status of the HAWQ Master, Standby
+  or Segments
+  """
+
+  config = Script.get_config()
+  
+  component_name = config['componentName']
+  component = "master" if component_name in ["HAWQMASTER", "HAWQSTANDBY"] else "segment"
+  hawq_pid_file = os.path.join(constants.hawq_pid_dir, "hawq-{0}.pid".format(component))
+
+  File(hawq_pid_file, action='delete')
+  utils.create_dir_as_hawq_user(constants.hawq_pid_dir)
+
+  # Get hawq_master_directory or hawq_segment_directory value from hawq-site.xml depending
+  # on the component
+  hawq_site_directory_property = "hawq_{0}_directory".format(component)
+
+  # hawq-site content from the Ambari server will not be available when the
+  # command type is STATUS_COMMAND. Hence, read it directly from the local file
+  postmaster_pid_file = os.path.join(common.get_local_hawq_site_property(
+      hawq_site_directory_property), constants.postmaster_pid_filename)
+
+  pid = ""
+  if os.path.exists(postmaster_pid_file):
+    with open(postmaster_pid_file, 'r') as fh:
+      pid = fh.readline().strip()
+
+  if not pid:
+    raise Fail("Failed to fetch pid from {0}".format(postmaster_pid_file))
+
+  File(hawq_pid_file, content=pid, owner=constants.hawq_user, group=constants.hawq_group)
+
+  return hawq_pid_file
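
Only the first line of PostgreSQL's postmaster.pid is consumed here, which is why a single readline() suffices. Such a file typically begins with the pid followed by the data directory (values illustrative):

    12345
    /data/hawq/master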

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/master_helper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/master_helper.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/master_helper.py
new file mode 100644
index 0000000..35f5112
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/master_helper.py
@@ -0,0 +1,194 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import os
+from resource_management.core.resources.system import File, Execute
+from resource_management.core.source import Template
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+
+import utils
+import common
+import constants
+
+def __setup_master_specific_conf_files():
+  """
+  Sets up config files only applicable for HAWQ Master and Standby nodes
+  """
+  import params
+
+  File(constants.hawq_check_file, content=params.gpcheck_content, owner=constants.hawq_user, group=constants.hawq_group,
+      mode=0644)
+
+  File(constants.hawq_slaves_file, content=Template("slaves.j2"), owner=constants.hawq_user, group=constants.hawq_group,
+       mode=0644)
+
+  File(constants.hawq_hosts_file, content=Template("hawq-hosts.j2"), owner=constants.hawq_user, group=constants.hawq_group,
+       mode=0644)
+
+
+def __setup_passwordless_ssh():
+  """
+  Exchanges ssh keys to set up passwordless ssh for the hawq_user between the HAWQ Master and the HAWQ Segment nodes
+  """
+  utils.exec_hawq_operation("ssh-exkeys", "-f {0} -p {1}".format(constants.hawq_hosts_file, constants.hawq_password))
+
+  File(constants.hawq_hosts_file, action='delete')
+
+
+def __setup_hawq_user_profile():
+  """
+  Sets up the environment variables for hawq_user as a convenience for command line users
+  """
+  hawq_profile_file = os.path.join(os.path.expanduser("~{0}".format(constants.hawq_user)), ".hawq-profile.sh")
+  File(hawq_profile_file, content=Template("hawq-profile.sh.j2"), owner=constants.hawq_user, group=constants.hawq_group)
+  common.update_bashrc(hawq_profile_file, constants.hawq_user_bashrc_file)
+
+
+def configure_master():
+  """
+  Configures the master node after rpm install
+  """
+  common.setup_user()
+  common.setup_common_configurations()
+  __setup_master_specific_conf_files()
+  __setup_passwordless_ssh()
+  __setup_hawq_user_profile()
+  __create_local_dirs()
+
+
+def __create_local_dirs():
+  """
+  Creates the required local directories for HAWQ 
+  """
+  import params
+  # Create Master directories
+  utils.create_dir_as_hawq_user(params.hawq_master_dir)
+  utils.create_dir_as_hawq_user(params.hawq_master_temp_dir.split(','))
+
+  Execute("chown {0}:{1} {2}".format(constants.hawq_user, constants.hawq_group, os.path.dirname(params.hawq_master_dir)),
+          user=constants.root_user, timeout=constants.default_exec_timeout)
+
+  Execute("chmod 700 {0}".format(params.hawq_master_dir), user=constants.root_user, timeout=constants.default_exec_timeout)
+
+
+def __create_hdfs_dirs():
+  """
+  Creates the required HDFS directories for HAWQ
+  """
+  import params
+  params.HdfsResource(params.hawq_hdfs_data_dir, type="directory", action="create_on_execute", owner=constants.hawq_user, group=constants.hawq_group, mode=0755)
+  params.HdfsResource(None, action="execute")
+
+
+def __init_active():
+  """
+  Initializes the active master
+  """
+  __create_hdfs_dirs()
+  utils.exec_hawq_operation(constants.INIT, "{0} -a -v".format(constants.MASTER))
+
+
+def __init_standby():
+  """
+  Initializes the HAWQ Standby Master
+  """
+  utils.exec_hawq_operation(constants.INIT, "{0} -a -v".format(constants.STANDBY))
+
+
+def __get_component_name():
+  """
+  Identifies current node as either HAWQ Master or HAWQ Standby Master
+  """
+  return constants.MASTER if __is_active_master() else constants.STANDBY
+
+
+def __start_local_master():
+  """
+  Starts HAWQ Master or HAWQ Standby Master component on the host
+  """
+  import params
+  utils.exec_hawq_operation(
+        constants.START, 
+        "{0} -a".format(__get_component_name()), 
+        not_if=utils.chk_postgres_status_cmd(params.hawq_master_address_port))
+
+  
+def __is_local_initialized():
+  """
+  Checks if the local node has been initialized
+  """
+  import params
+  return os.path.exists(os.path.join(params.hawq_master_dir, constants.postmaster_opts_filename))
+
+
+def __get_standby_host():
+  """
+  Returns the name of the HAWQ Standby Master host from hawq-site.xml, or None if no standby is configured
+  """
+  standby_host = common.get_local_hawq_site_property("hawq_standby_address_host")
+  return None if standby_host is None or standby_host.lower() == 'none' else standby_host
+
+
+def __is_standby_initialized():
+  """
+  Returns True if HAWQ Standby Master is initialized, False otherwise
+  """
+  import params
+  
+  file_path = os.path.join(params.hawq_master_dir, constants.postmaster_opts_filename)
+  (retcode, _, _) = utils.exec_ssh_cmd(__get_standby_host(), "[ -f {0} ]".format(file_path))
+  return retcode == 0
+
+
+def start_master():
+  """
+  Initializes HAWQ Master/Standby if not already done and starts them
+  """
+  import params
+
+  if params.hostname not in [params.hawqmaster_host, params.hawqstandby_host]:
+    raise Fail("Host should be either the active HAWQ master or the HAWQ standby.")
+
+  is_active_master = __is_active_master()
+  if __is_local_initialized():
+    __start_local_master()
+
+  elif is_active_master:
+    __init_active()
+
+  if is_active_master and __get_standby_host() is not None and not __is_standby_initialized():
+    __init_standby()
+
+
+def stop_master():
+  """
+  Stops the HAWQ Master/Standby
+  """
+  import params
+  utils.exec_hawq_operation(
+                constants.STOP,
+                "{0} -a".format(__get_component_name()),
+                only_if=utils.chk_postgres_status_cmd(params.hawq_master_address_port))
+
+
+def __is_active_master():
+  """
+  Finds if this node is the active master
+  """
+  import params
+  return params.hostname == common.get_local_hawq_site_property("hawq_master_address_host")
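
The branching in start_master reduces to a small decision table, evaluated on the current host; note that an uninitialized standby host does nothing locally, since the standby is initialized over ssh from the active master:

    # local initialized? | active master? | standby configured, uninitialized? | action
    # yes                | either         | -                                  | start the local component
    # no                 | yes            | -                                  | init active (init also starts it)
    # any                | yes            | yes                                | additionally init the standby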

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/params.py
new file mode 100644
index 0000000..fb449b9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/params.py
@@ -0,0 +1,92 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import functools
+from resource_management import Script
+from resource_management.libraries.functions.default import default
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+
+config = Script.get_config()
+
+
+def __get_component_host(component):
+  """
+  Returns the first host where the given component is deployed, None if the component is not deployed
+  """
+  component_host = None
+  if component in config['clusterHostInfo'] and len(config['clusterHostInfo'][component]) > 0:
+    component_host = config['clusterHostInfo'][component][0]
+  return component_host
+
+
+def __get_namenode_host():
+  """
+  Gets the namenode host; active namenode in case of HA
+  """
+  namenode_host = __get_component_host('namenode_host')
+  
+  # hostname of the active HDFS HA Namenode (only used when HA is enabled)
+  dfs_ha_namenode_active = default('/configurations/hadoop-env/dfs_ha_initial_namenode_active', None)
+  if dfs_ha_namenode_active is not None:
+    namenode_host = dfs_ha_namenode_active
+  return namenode_host
+
+
+hostname = config['hostname']
+
+# Users and Groups
+hdfs_superuser = config['configurations']['hadoop-env']['hdfs_user']
+user_group = config['configurations']['cluster-env']['user_group']
+
+# HAWQ Hostnames
+hawqmaster_host = __get_component_host('hawqmaster_hosts')
+hawqstandby_host = __get_component_host('hawqstandby_hosts')
+hawqsegment_hosts = default('/clusterHostInfo/hawqsegment_hosts', [])
+
+# HDFS
+hdfs_site = config['configurations']['hdfs-site']
+default_fs = config['configurations']['core-site']['fs.defaultFS']
+
+# HDFSResource partial function
+HdfsResource = functools.partial(HdfsResource, user=hdfs_superuser, hdfs_site=hdfs_site, default_fs=default_fs)
+
+namenode_host = __get_namenode_host()
+
+# YARN
+# Note: YARN is not mandatory for HAWQ. It is required only when the users set HAWQ to use YARN as resource manager
+rm_host = __get_component_host('rm_host')
+
+# Config files
+gpcheck_content = config['configurations']['gpcheck-env']['content']
+# database user limits
+hawq_limits = config['configurations']['hawq-limits-env']
+# sysctl parameters
+hawq_sysctl = config['configurations']['hawq-sysctl-env']
+
+hawq_site = config['configurations']['hawq-site']
+hawq_master_dir = hawq_site.get('hawq_master_directory')
+hawq_segment_dir = hawq_site.get('hawq_segment_directory')
+hawq_master_temp_dir = hawq_site.get('hawq_master_temp_directory')
+hawq_segment_temp_dir = hawq_site.get('hawq_segment_temp_directory')
+# Extract the HAWQ HDFS directory from the HDFS URL. Ex: /hawq/hawq_default from
+# host:8020/hawq/hawq_default
+hawq_hdfs_data_dir = "/{0}".format(hawq_site.get('hawq_dfs_url').split('/', 1)[1])
+hawq_master_address_port = hawq_site.get('hawq_master_address_port')
+hawq_segment_address_port = hawq_site.get('hawq_segment_address_port')
+
+
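
A worked example of the hawq_hdfs_data_dir extraction, using the stack's default hawq_dfs_url value:

    >>> hawq_dfs_url = "localhost:8020/hawq_default"
    >>> "/{0}".format(hawq_dfs_url.split('/', 1)[1])
    '/hawq_default'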

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/service_check.py
new file mode 100644
index 0000000..7a99f5e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/service_check.py
@@ -0,0 +1,102 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import common
+import constants
+from utils import exec_psql_cmd, exec_ssh_cmd
+from resource_management.libraries.script import Script
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+
+import sys
+
+class HAWQServiceCheck(Script):
+  """
+  Runs a set of simple HAWQ tests to verify if the service has been setup correctly
+  """
+
+  def __init__(self):
+    self.active_master_host = common.get_local_hawq_site_property("hawq_master_address_host")
+
+
+  def service_check(self, env):
+    Logger.info("Starting HAWQ service checks..")
+    # All the tests are run on the active_master_host using ssh irrespective of the node on which service check
+    # is executed by Ambari
+    try:
+      self.check_state()
+      self.drop_table()
+      self.create_table()
+      self.insert_data()
+      self.query_data()
+      self.check_data_correctness()
+    except Exception as ex:
+      Logger.error("Service check failed: {0}".format(ex))
+      sys.exit(1)
+    finally:
+      self.drop_table()
+
+    Logger.info("Service check completed successfully")
+
+
+  def drop_table(self):
+    Logger.info("Dropping {0} table if exists".format(constants.smoke_check_table_name))
+    sql_cmd = "drop table if exists {0}".format(constants.smoke_check_table_name)
+    exec_psql_cmd(sql_cmd, self.active_master_host)
+
+
+  def create_table(self):
+    Logger.info("Creating table {0}".format(constants.smoke_check_table_name))
+    sql_cmd = "create table {0} (col1 int) distributed randomly".format(constants.smoke_check_table_name)
+    exec_psql_cmd(sql_cmd, self.active_master_host)
+
+
+  def insert_data(self):
+    Logger.info("Inserting data to table {0}".format(constants.smoke_check_table_name))
+    sql_cmd = "insert into {0} select * from generate_series(1,10)".format(constants.smoke_check_table_name)
+    exec_psql_cmd(sql_cmd, self.active_master_host)
+
+
+  def query_data(self):
+    Logger.info("Querying data from table {0}".format(constants.smoke_check_table_name))
+    sql_cmd = "select * from {0}".format(constants.smoke_check_table_name)
+    exec_psql_cmd(sql_cmd, self.active_master_host)
+
+
+  def check_data_correctness(self):
+    expected_data = "55"
+    Logger.info("Validating data inserted, finding sum of all the inserted entries. Expected output: {0}".format(expected_data))
+    sql_cmd = "select sum(col1) from {0}".format(constants.smoke_check_table_name)
+    _, stdout, _ = exec_psql_cmd(sql_cmd, self.active_master_host, tuples_only=False)
+    if expected_data != stdout.strip():
+      Logger.error("Incorrect data returned. Expected Data: {0} Actual Data: {1}".format(expected_data, stdout))
+      raise Fail("Incorrect data returned.")
+
+
+  def check_state(self):
+    import params
+    command = "source {0} && hawq state -d {1}".format(constants.hawq_greenplum_path_file, params.hawq_master_dir)
+    Logger.info("Executing hawq status check..")
+    (retcode, out, err) = exec_ssh_cmd(self.active_master_host, command)
+    if retcode:
+      Logger.error("hawq state command returned non-zero result: {0}. Out: {1} Error: {2}".format(retcode, out, err))
+      raise Fail("Unexpected result of hawq state command.")
+    Logger.info("Output of command:\n{0}".format(str(out) + "\n"))
+
+
+if __name__ == "__main__":
+  HAWQServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/utils.py
new file mode 100644
index 0000000..da51c19
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/scripts/utils.py
@@ -0,0 +1,108 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import subprocess
+
+from resource_management.core.resources.system import Execute, Directory
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+
+import constants
+
+def chk_postgres_status_cmd(port):
+  """
+  Checks if the postgres port is occupied
+  """
+  return "netstat -tupln | egrep ':{0}\s' | egrep postgres".format(port)
+
+
+def create_dir_as_hawq_user(directory):
+  """
+  Creates directories with hawq_user and hawq_group (defaults to gpadmin:gpadmin)
+  """
+  Directory(directory, recursive=True, owner=constants.hawq_user, group=constants.hawq_group)
+
+
+def exec_hawq_operation(operation, option, not_if=None, only_if=None, logoutput=True):
+  """
+  Sets up execution environment and runs a given command as HAWQ user
+  """
+  hawq_cmd = "source {0} && hawq {1} {2}".format(constants.hawq_greenplum_path_file, operation, option)
+  Execute(
+        hawq_cmd,
+        user=constants.hawq_user,
+        timeout=constants.hawq_operation_exec_timeout,
+        not_if=not_if,
+        only_if=only_if,
+        logoutput=logoutput)
+
+
+def read_file_to_dict(file_name):
+  """ 
+  Converts a file with key=value format to dictionary
+  """
+  with open(file_name, "r") as fh:
+    lines = fh.readlines()
+    lines = [item for item in lines if '=' in item]
+    result_dict = dict(item.strip().split("=", 1) for item in lines)
+  return result_dict
+
+
+def write_dict_to_file(source_dict, dest_file):
+  """
+  Writes a dictionary into a file with key=value format
+  """
+  with open(dest_file, "w") as fh:
+    for property_key, property_value in source_dict.items():
+      if property_value is None:
+        fh.write(property_key + "\n")
+      else:
+        fh.write("{0}={1}\n".format(property_key, property_value))
+
+
+def exec_ssh_cmd(hostname, cmd):
+  """
+  Runs the command on the remote host as gpadmin user
+  """
+  import params
+  # Only gpadmin should be allowed to run command via ssh, thus not exposing user as a parameter
+  if params.hostname != hostname:
+    cmd = "su - {0} -c 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null {1} \"{2} \" '".format(constants.hawq_user, hostname, cmd)
+  else:
+    cmd = "su - {0} -c \"{1}\"".format(constants.hawq_user, cmd)
+  Logger.info("Command executed: {0}".format(cmd))
+  process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+  (stdout, stderr) = process.communicate()
+  return process.returncode, stdout, stderr
+
+
+def exec_psql_cmd(command, host, db="template1", tuples_only=True):
+  """
+  Sets up execution environment and runs the HAWQ queries
+  """
+  src_cmd = "source {0}".format(constants.hawq_greenplum_path_file)
+  if tuples_only:
+    cmd = src_cmd + " && psql -d {0} -c \\\"{1};\\\"".format(db, command)
+  else:
+    cmd = src_cmd + " && psql -t -d {0} -c \\\"{1};\\\"".format(db, command)
+  retcode, out, err = exec_ssh_cmd(host, cmd)
+  if retcode:
+    Logger.error("SQL command executed failed: {0}\nReturncode: {1}\nStdout: {2}\nStderr: {3}".format(cmd, retcode, out, err))
+    raise Fail("SQL command executed failed.")
+
+  Logger.info("Output:\n{0}".format(out))
+  return retcode, out, err
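
A quick round-trip sketch for the key=value helpers above (the file path is hypothetical):

    write_dict_to_file({"soft_nofile": "2900000"}, "/tmp/hawq_kv.demo")
    print(read_file_to_dict("/tmp/hawq_kv.demo"))  # {'soft_nofile': '2900000'}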

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-hosts.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-hosts.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-hosts.j2
new file mode 100644
index 0000000..9bdaa75
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-hosts.j2
@@ -0,0 +1,5 @@
+{{hawqmaster_host}}
+{{hawqstandby_host}}
+{% for host in hawqsegment_hosts %}
+{{host}}
+{% endfor %}
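
Rendered with illustrative hostnames, hawq-hosts becomes one host per line, the hostfile format that the ssh-exkeys step above consumes:

    master1.example.com
    standby1.example.com
    segment1.example.com
    segment2.example.com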

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-profile.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-profile.sh.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-profile.sh.j2
new file mode 100644
index 0000000..e28e416
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/hawq-profile.sh.j2
@@ -0,0 +1,8 @@
+######################################
+##### HAWQ env #######################
+######################################
+source {{hawq_greenplum_path_file}}
+export MASTER_DATA_DIRECTORY={{hawq_master_dir}}
+export PGPORT={{hawq_master_address_port}}
+export PGHOST={{hawqmaster_host}}
+export PGUSER={{hawq_user}}
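
With the stack defaults from hawq-site substituted (the greenplum_path location and master hostname are illustrative), the rendered profile would look roughly like:

    source /usr/local/hawq/greenplum_path.sh
    export MASTER_DATA_DIRECTORY=/data/hawq/master
    export PGPORT=5432
    export PGHOST=master1.example.com
    export PGUSER=gpadmin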

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/slaves.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/slaves.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/slaves.j2
new file mode 100644
index 0000000..591d92b
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0.0/package/templates/slaves.j2
@@ -0,0 +1,3 @@
+{% for host in hawqsegment_hosts %}
+{{host}}
+{% endfor %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/gpcheck-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/gpcheck-env.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/gpcheck-env.xml
deleted file mode 100755
index a61a34f..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/gpcheck-env.xml
+++ /dev/null
@@ -1,86 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<configuration>
-  <!-- gpcheck.cnf -->
-    <property>
-      <name>content</name>
-      <display-name>Content</display-name>
-      <description>Contents of the configuration file /usr/local/hawq/etc/gpcheck.cnf. This file is used by 'hawq check' command, which can be run manually by gpadmin user on the HAWQ master host. This command validates the system parameters and HDFS parameters mentioned in this file to ensure optimal HAWQ operation.</description>
-        <value>
-[global]
-configfile_version = 4
-
-[linux.mount]
-mount.points = /
-
-[linux.sysctl]
-sysctl.kernel.shmmax = 500000000
-sysctl.kernel.shmmni = 4096
-sysctl.kernel.shmall = 4000000000
-sysctl.kernel.sem = 250 512000 100 2048
-sysctl.kernel.sysrq = 1
-sysctl.kernel.core_uses_pid = 1
-sysctl.kernel.msgmnb = 65536
-sysctl.kernel.msgmax = 65536
-sysctl.kernel.msgmni = 2048
-sysctl.net.ipv4.tcp_syncookies = 0
-sysctl.net.ipv4.ip_forward = 0
-sysctl.net.ipv4.conf.default.accept_source_route = 0
-sysctl.net.ipv4.tcp_tw_recycle = 1
-sysctl.net.ipv4.tcp_max_syn_backlog = 200000
-sysctl.net.ipv4.conf.all.arp_filter = 1
-sysctl.net.ipv4.ip_local_port_range = 1281 65535
-sysctl.net.core.netdev_max_backlog = 200000
-sysctl.vm.overcommit_memory = 2
-sysctl.fs.nr_open = 3000000
-sysctl.kernel.threads-max = 798720
-sysctl.kernel.pid_max = 798720
-# increase network
-sysctl.net.core.rmem_max = 2097152
-sysctl.net.core.wmem_max = 2097152
-
-[linux.limits]
-soft.nofile = 2900000
-hard.nofile = 2900000
-soft.nproc  = 131072
-hard.nproc  = 131072
-
-[linux.diskusage]
-diskusage.monitor.mounts = /
-diskusage.monitor.usagemax = 90%
-
-[hdfs]
-dfs.mem.namenode.heap = 40960
-dfs.mem.datanode.heap = 6144
-# in hdfs-site.xml
-dfs.support.append = true
-dfs.client.enable.read.from.local = true
-dfs.block.local-path-access.user = gpadmin
-dfs.datanode.max.transfer.threads = 40960
-dfs.client.socket-timeout = 300000000
-dfs.datanode.socket.write.timeout = 7200000
-dfs.namenode.handler.count = 60
-ipc.server.handler.queue.size = 3300
-dfs.datanode.handler.count = 60
-ipc.client.connection.maxidletime = 3600000
-dfs.namenode.accesstime.precision = -1
-    </value>
-  </property>
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-limits-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-limits-env.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-limits-env.xml
deleted file mode 100644
index d8917e6..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-limits-env.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<configuration>
-    <property>
-        <display-name>soft nofile</display-name>
-        <name>soft_nofile</name>
-        <value>2900000</value>
-        <description>Soft limit for the number of file handles or open files for user gpadmin. Value is set in file /etc/security/limits.d/gpadmin.conf</description>
-    </property>
-
-    <property>
-        <display-name>hard nofile</display-name>
-        <name>hard_nofile</name>
-        <value>2900000</value>
-        <description>Hard limit for the number of file handles or open files for user gpadmin. Value is set in /etc/security/limits.d/gpadmin.conf</description>
-    </property>
-
-    <property>
-        <display-name>soft nproc</display-name>
-        <name>soft_nproc</name>
-        <value>131072</value>
-        <description>Soft limit for the maximum number of processes for user gpadmin. Value is set in /etc/security/limits.d/gpadmin.conf</description>
-    </property>
-
-    <property>
-        <display-name>hard nproc</display-name>
-        <name>hard_nproc</name>
-        <value>131072</value>
-        <description>Hard limit for the maximum number of processes for user gpadmin. Value is set in /etc/security/limits.d/gpadmin.conf</description>
-    </property>
-</configuration>
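
These four properties map one-to-one onto /etc/security/limits.d entries for gpadmin; a small sketch of that rendering (standalone illustration, not the module's actual code):

    limits = {"soft_nofile": "2900000", "hard_nofile": "2900000",
              "soft_nproc": "131072", "hard_nproc": "131072"}
    for name, value in limits.items():
        kind, resource = name.split("_", 1)
        print("gpadmin {0} {1} {2}".format(kind, resource, value))
    # e.g. gpadmin soft nofile 2900000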


http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml
deleted file mode 100644
index 41b10dc..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-site.xml
+++ /dev/null
@@ -1,178 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<configuration>
-  <property>
-    <name>hawq_master_address_host</name>
-    <display-name>HAWQ Master</display-name>
-    <value>localhost</value>
-    <description>The host name of HAWQ master.</description>
-  </property>
-
-  <property>
-    <name>hawq_standby_address_host</name>
-    <display-name>HAWQ Standby Master</display-name>
-    <value>localhost</value>
-    <description>The host name of HAWQ standby.</description>
-  </property>
-
-  <property>
-    <name>hawq_master_address_port</name>
-    <display-name>HAWQ Master Port</display-name>
-    <value>5432</value>
-    <description>The port of HAWQ master.</description>
-  </property>
-
-  <property>
-    <name>hawq_segment_address_port</name>
-    <display-name>HAWQ Segment Port</display-name>
-    <value>40000</value>
-    <description>The port of HAWQ segment.</description>
-  </property>
-
-  <property>
-    <name>hawq_dfs_url</name>
-    <display-name>HAWQ DFS URL</display-name>
-    <value>localhost:8020/hawq_default</value>
-    <description>URL for accessing HDFS.</description>
-  </property>
-
-  <property>
-    <name>hawq_master_directory</name>
-    <display-name>HAWQ Master Directory</display-name>
-    <value>/data/hawq/master</value>
-    <description>The directory of HAWQ master.</description>
-  </property>
-
-  <property>
-    <name>hawq_segment_directory</name>
-    <display-name>HAWQ Segment Directory</display-name>
-    <value>/data/hawq/segment</value>
-    <description>The directory of HAWQ segment.</description>
-  </property> 
-
-  <property>
-    <name>hawq_master_temp_directory</name>
-    <display-name>HAWQ Master Temp Directory</display-name>
-    <value>/tmp</value>
-    <description>The temporary directory reserved for HAWQ master.</description>
-  </property>
-
-  <property>
-    <name>hawq_segment_temp_directory</name>
-    <display-name>HAWQ Segment Temp Directory</display-name>
-    <value>/tmp</value>
-    <description>The temporary directory reserved for HAWQ segment.</description>
-  </property>
-
-  <!-- HAWQ resource manager parameters -->
-  <property>
-    <name>hawq_global_rm_type</name>
-    <value>none</value>
-    <description>The resource manager type to use for allocating resources.
-      'none' means the HAWQ resource manager exclusively uses the whole
-      cluster; 'yarn' means the HAWQ resource manager contacts the YARN
-      resource manager to negotiate resources.
-    </description>
-  </property>
-
-  <property>
-    <name>hawq_rm_memory_limit_perseg</name>
-    <value>64GB</value>
-    <description>The limit of memory usage in a HAWQ segment when
-      hawq_global_rm_type is set to 'none'.
-    </description>
-  </property>
-
-  <property>
-    <name>hawq_rm_nvcore_limit_perseg</name>
-    <value>16</value>
-    <description>The limit of virtual core usage in a HAWQ segment when
-      hawq_global_rm_type is set to 'none'.
-    </description>
-  </property>
-
-  <property>
-    <name>hawq_rm_yarn_address</name>
-    <value>localhost:8032</value>
-    <description>The address of YARN resource manager server.</description>
-  </property>
-
-  <property>
-    <name>hawq_rm_yarn_scheduler_address</name>
-    <value>localhost:8030</value>
-    <description>The address of YARN scheduler server.</description>
-  </property>
-
-  <property>
-    <name>hawq_rm_yarn_queue_name</name>
-    <value>default</value>
-    <description>The YARN queue name to register HAWQ resource manager.</description>
-  </property>
-
-  <property>
-    <name>hawq_rm_yarn_app_name</name>
-    <value>hawq</value>
-    <description>The application name to register HAWQ resource manager in YARN.</description>
-  </property>
-
-  <property>
-    <name>hawq_re_cpu_enable</name>
-    <value>false</value>
-  </property>
-
-  <property>
-    <name>hawq_re_cgroup_mount_point</name>
-    <value>/sys/fs/cgroup</value>
-  </property>
-
-  <property>
-    <name>hawq_re_cgroup_hierarchy_name</name>
-    <value>hadoop-yarn</value>
-  </property>
-
-  <property>
-    <name>hawq_re_cleanup_period</name>
-    <value>180</value>
-  </property>
-
-  <property>
-    <name>hawq_re_cpu_weight</name>
-    <value>1024.0</value>
-  </property>
-
-  <property>
-    <name>hawq_re_vcore_pcore_ratio</name>
-    <value>1.0</value>
-  </property>
-
-  <property>
-    <name>hawq_resourcemanager_master_address_domainsocket_port</name>
-    <value>5436</value>
-  </property>
-
-  <property>
-    <name>hawq_rm_master_port</name>
-    <value>5437</value>
-  </property>
-
-  <property>
-    <name>hawq_rm_segment_port</name>
-    <value>5438</value>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-sysctl-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-sysctl-env.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-sysctl-env.xml
deleted file mode 100644
index 32ae5a5..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/configuration/hawq-sysctl-env.xml
+++ /dev/null
@@ -1,247 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<configuration>
-  <property>
-    <name>kernel.shmmax</name>
-    <value>500000000</value>
-    <description>Maximum size in bytes of a single shared memory segment that a Linux process can allocate in its
-      virtual address space</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kernel.shmmni</name>
-    <value>4096</value>
-    <description>System wide maximum number of shared memory segments</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kernel.shmall</name>
-    <value>4000000000</value>
-    <description>Total amount of shared memory pages that can be used system wide</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kernel.sem</name>
-    <value>250 512000 100 2048</value>
-    <description>Parameter to define semaphore related values</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kernel.sysrq</name>
-    <value>1</value>
-    <description>Enable(1)/Disable(0) functions of sysrq</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kernel.core_uses_pid</name>
-    <value>1</value>
-    <description>Enable appending process id to the name of core dump file. Ex: core.PID</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kernel.msgmnb</name>
-    <value>65536</value>
-    <description>Default maximum size in bytes of a message queue</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kernel.msgmax</name>
-    <value>65536</value>
-    <description>Maximum size in bytes of a single message</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kernel.msgmni</name>
-    <value>2048</value>
-    <description>Number of message queue identifiers</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>net.ipv4.tcp_syncookies</name>
-    <value>0</value>
-    <description>Enable(1)/Disable(0) SYN cookie protection</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>net.ipv4.ip_forward</name>
-    <value>0</value>
-    <description>Enable(1)/Disable(0) IP forwarding</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>net.ipv4.conf.default.accept_source_route</name>
-    <value>0</value>
-    <description>Enable(1)/Disable(0) IP source routing</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>net.ipv4.tcp_tw_recycle</name>
-    <value>1</value>
-    <description>Enable(1)/Disable(0) fast recycling of TIME_WAIT sockets</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>net.ipv4.tcp_max_syn_backlog</name>
-    <value>200000</value>
-    <description>Number of outstanding syn requests allowed</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>net.ipv4.conf.all.arp_filter</name>
-    <value>1</value>
-    <description>Enable(1)/Disable(0) route lookup to determine the interface through which to send the reply</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>net.ipv4.ip_local_port_range</name>
-    <value>1281 65535</value>
-    <description>Local port range that is used by TCP and UDP traffic to choose the local port</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>net.core.netdev_max_backlog</name>
-    <value>200000</value>
-    <description>Maximum number of packets allowed to queue when a particular interface receives packets faster than the kernel can process them</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>fs.nr_open</name>
-    <value>3000000</value>
-    <description>Maximum number of file-handles a process can allocate</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kernel.threads-max</name>
-    <value>798720</value>
-    <description>System-wide maximum number of threads to be used by the kernel at one time</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>kernel.pid_max</name>
-    <value>798720</value>
-    <description>Maximum number of unique process identifiers</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>net.core.rmem_max</name>
-    <value>2097152</value>
-    <description>Maximum receive socket buffer size in bytes</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>net.core.wmem_max</name>
-    <value>2097152</value>
-    <description>Maximum send socket buffer size in bytes</description>
-    <value-attributes>
-      <overridable>false</overridable>
-      <editable-only-at-install>true</editable-only-at-install>
-    </value-attributes>
-  </property>
-
-  <property>
-    <name>vm.overcommit_memory</name>
-    <value>1</value>
-    <description>Defines the conditions that determine whether a large memory request is accepted or denied. There are
-      three possible values for this parameter: 0, 1 or 2. For a production environment, a value of 2 is recommended.</description>
-  </property>
-
-</configuration>
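
Applied to the OS, each property here is a sysctl key; with the defaults above they correspond to sysctl.conf-style lines such as:

    kernel.shmmax = 500000000
    kernel.sem = 250 512000 100 2048
    vm.overcommit_memory = 1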

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
deleted file mode 100644
index 2ceff96..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/metainfo.xml
+++ /dev/null
@@ -1,129 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>HAWQ</name>
-      <displayName>HAWQ</displayName>
-      <comment>Apache HAWQ - Apache Hadoop Native SQL</comment>
-      <version>2.0</version>
-      <components>
-
-        <component>
-          <name>HAWQMASTER</name>
-          <displayName>HAWQ Master</displayName>
-          <category>MASTER</category>
-          <cardinality>1</cardinality>
-          <commandScript>
-            <script>scripts/hawqmaster.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>1200</timeout>
-          </commandScript>
-          <dependencies>
-            <dependency>
-              <name>HDFS/NAMENODE</name>
-              <scope>cluster</scope>
-              <auto-deploy>
-                <enabled>false</enabled>
-              </auto-deploy>
-            </dependency>
-            <dependency>
-              <name>HDFS/HDFS_CLIENT</name>
-              <scope>host</scope>
-              <auto-deploy>
-                  <enabled>true</enabled>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-        </component>
-
-      <component>
-          <name>HAWQSTANDBY</name>
-          <displayName>HAWQ Standby Master</displayName>
-          <category>MASTER</category>
-          <cardinality>0-1</cardinality>
-          <commandScript>
-              <script>scripts/hawqstandby.py</script>
-              <scriptType>PYTHON</scriptType>
-              <timeout>600</timeout>
-          </commandScript>
-          <dependencies>
-            <dependency>
-                <name>HDFS/HDFS_CLIENT</name>
-                <scope>host</scope>
-                <auto-deploy>
-                    <enabled>true</enabled>
-                </auto-deploy>
-            </dependency>
-          </dependencies>
-      </component>
-
-        <component>
-          <name>HAWQSEGMENT</name>
-          <displayName>HAWQ Segment</displayName>
-          <category>SLAVE</category>
-          <cardinality>1+</cardinality>
-          <commandScript>
-            <script>scripts/hawqsegment.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-           <dependencies>
-            <dependency>
-              <name>HDFS/DATANODE</name>
-              <scope>host</scope>
-              <auto-deploy>
-                <enabled>false</enabled>
-                <co-locate>HDFS/DATANODE</co-locate>
-              </auto-deploy>
-            </dependency>
-          </dependencies>
-        </component>
-      </components> 
-      <requiredServices>
-          <service>HDFS</service>
-      </requiredServices>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-           <packages>
-            <package>
-              <name>hawq</name>
-            </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-      <commandScript>
-        <script>scripts/service_check.py</script>
-        <scriptType>PYTHON</scriptType>
-        <timeout>300</timeout>
-      </commandScript>
-
-      <configuration-dependencies>
-        <config-type>hawq-site</config-type>
-        <config-type>gpcheck-env</config-type>
-      </configuration-dependencies>
-    </service>
-
-  </services>
-</metainfo>
-
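
metainfo.xml binds each component to one of the Script subclasses above, and the agent dispatches the requested command to the method of the same name. A toy version of that dispatch (not the real Ambari agent logic):

    class DemoComponent(object):
        def start(self, env):
            print("starting with env {0}".format(env))

    command = "start"  # in reality chosen by the Ambari server/agent per request
    getattr(DemoComponent(), command)(env={})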

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
deleted file mode 100644
index 31ef2fa..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/common.py
+++ /dev/null
@@ -1,283 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import re
-import os
-import time
-import crypt
-import filecmp
-from resource_management.libraries.resources.xml_config import XmlConfig
-from resource_management.core.resources.system import Execute, Directory, File
-from resource_management.core.logger import Logger
-from resource_management.core.system import System
-from resource_management.core.exceptions import Fail
-from resource_management.core.resources.accounts import Group, User
-from resource_management.core.source import Template
-import xml.etree.ElementTree as ET
-
-import utils
-import constants
-
-
-def update_bashrc(source_file, target_file):
-  """
-  Updates the hawq_user's .bashrc file with HAWQ env variables like
-  MASTER_DATA_DIRECTORY, PGHOST, PGPORT and PGUSER, and sources
-  the greenplum_path file.
-  """
-  append_src_cmd = "echo 'source {0}' >> {1}".format(source_file, target_file)
-  src_cmd_exists = "grep 'source {0}' {1}".format(source_file, target_file)
-  Execute(append_src_cmd, user=constants.hawq_user, timeout=constants.default_exec_timeout, not_if=src_cmd_exists)
-
-
-def setup_user():
-  """
-  Creates the HAWQ user and sets the correct ownership on its home directory.
-  """
-  __create_hawq_user()
-  __set_home_dir_ownership()
-
-
-def __create_hawq_user():
-  """
-  Creates HAWQ user with default password and group.
-  """
-  import params
-  Group(constants.hawq_group, ignore_failures=True)
-
-  User(constants.hawq_user,
-       gid=constants.hawq_group,
-       password=crypt.crypt(constants.hawq_password, "salt"),
-       groups=[constants.hawq_group, params.user_group],
-       ignore_failures=True)
-
-
-def __set_home_dir_ownership():
-  """
-  Updates the HAWQ user home directory to be owned by gpadmin:gpadmin.
-  """
-  command = "chown -R {0}:{1} {2}".format(constants.hawq_user, constants.hawq_group, constants.hawq_home_dir)
-  Execute(command, timeout=constants.default_exec_timeout)
-
-
-def setup_common_configurations():
-  """
-  Sets up the config files common to master, standby and segment nodes.
-  """
-  import params
-
-  substituted_conf_dict = __substitute_hostnames_in_hawq_site()
-  XmlConfig("hawq-site.xml",
-            conf_dir=constants.hawq_config_dir,
-            configurations=substituted_conf_dict,
-            configuration_attributes=params.config['configuration_attributes']['hawq-site'],
-            owner=constants.hawq_user,
-            group=constants.hawq_group,
-            mode=0644)
-  __set_osparams()
-
-
-def __substitute_hostnames_in_hawq_site():
-  """
-  Temporary function to replace localhost with the actual HAWQ component hostnames.
-  This function will remain in place until the entire HAWQ plugin code, along with
-  the UI changes, is submitted to trunk.
-  """
-  import params
-
-  LOCALHOST = "localhost"
-  
-  # in case there is no standby
-  hawqstandby_host_desired_value = params.hawqstandby_host if params.hawqstandby_host is not None else 'none' 
-  
-  substituted_hawq_site = params.hawq_site.copy()
-  hawq_site_property_map = {"hawq_master_address_host": params.hawqmaster_host,
-                            "hawq_standby_address_host": hawqstandby_host_desired_value,
-                            "hawq_rm_yarn_address": params.rm_host,
-                            "hawq_rm_yarn_scheduler_address": params.rm_host,
-                            "hawq_dfs_url": params.namenode_host
-                            }
-
-  for property, desired_value in hawq_site_property_map.iteritems():
-    if desired_value is not None:
-      # Replace localhost with required component hostname
-      substituted_hawq_site[property] = re.sub(LOCALHOST, desired_value, substituted_hawq_site[property])
-
-  return substituted_hawq_site
-
-
-def __set_osparams():
-  """
-  Updates parameters in sysctl.conf and limits.conf required by HAWQ.
-  """
-  # Create a temp scratchpad directory
-  utils.create_dir_as_hawq_user(constants.hawq_tmp_dir)
-
-  # Suse doesn't support loading values from files in /etc/sysctl.d
-  # So we will have to directly edit the sysctl file
-  if System.get_instance().os_family == "suse":
-    # Update /etc/sysctl.conf
-    __update_sysctl_file_suse()
-  else:
-    # Update /etc/sysctl.d/hawq.conf
-    __update_sysctl_file()
-
-  __update_limits_file()
-
-
-def __update_limits_file():
-  """
-  Updates /etc/security/limits.d/hawq.conf file with the HAWQ parameters.
-  """
-  import params
-  # Ensure limits directory exists
-  Directory(constants.limits_conf_dir, recursive=True, owner=constants.root_user, group=constants.root_user)
-
-  # Generate limits for hawq user
-  limits_file_content = "#### HAWQ Limits Parameters  ###########\n"
-  for key, value in params.hawq_limits.iteritems():
-    if not __valid_input(value):
-      raise Exception("Value {0} for parameter {1} contains non-numeric characters which are not allowed (except whitespace), please fix the value and retry".format(value, key))
-    """
-    Content of the file to be written should be of the format
-    gpadmin soft nofile 290000
-    gpadmin hard nofile 290000
-    key used in the configuration is of the format soft_nofile, thus strip '_' and replace with 'space'
-    """
-    limits_file_content += "{0} {1} {2}\n".format(constants.hawq_user, re.sub("_", " ", key), value.strip())
-  File('{0}/{1}.conf'.format(constants.limits_conf_dir, constants.hawq_user), content=limits_file_content,
-       owner=constants.hawq_user, group=constants.hawq_group)
-
-
-def __valid_input(value):
-  """
-  Validates that the value contains only digits and whitespace; returns a match object if valid, else None
-  """
-  return re.search("^ *[0-9][0-9 ]*$", value)
-
-
-def __convert_sysctl_dict_to_text():
-  """
-  Converts the sysctl configuration dict to text, with each property-value pair on its own line
-  """
-  import params
-  sysctl_file_content = "### HAWQ System Parameters ###########\n"
-  for key, value in params.hawq_sysctl.iteritems():
-    if not __valid_input(value):
-      raise Exception("Value {0} for parameter {1} contains non-numeric characters which are not allowed (except whitespace), please fix the value and retry".format(value, key))
-    sysctl_file_content += "{0} = {1}\n".format(key, value)
-  return sysctl_file_content
-
-
-def __update_sysctl_file():
-  """
-  Updates /etc/sysctl.d/hawq_sysctl.conf file with the HAWQ parameters on CentOS/RHEL.
-  """
-  # Ensure sys ctl sub-directory exists
-  Directory(constants.sysctl_conf_dir, recursive=True, owner=constants.root_user, group=constants.root_user)
-
-  # Generate temporary file with kernel parameters needed by hawq
-  File(constants.hawq_sysctl_tmp_file, content=__convert_sysctl_dict_to_text(), owner=constants.hawq_user,
-       group=constants.hawq_group)
-
-  is_changed = True
-  if os.path.exists(constants.hawq_sysctl_tmp_file) and os.path.exists(constants.hawq_sysctl_file):
-    is_changed = not filecmp.cmp(constants.hawq_sysctl_file, constants.hawq_sysctl_tmp_file)
-
-  if is_changed:
-    # Install the generated kernel parameters file, but only if its
-    # contents differ from the file already in place
-    Execute("cp -p {0} {1}".format(constants.hawq_sysctl_tmp_file, constants.hawq_sysctl_file))
-
-    # Reload kernel sysctl parameters from hawq file.
-    Execute("sysctl -e -p {0}".format(constants.hawq_sysctl_file), timeout=constants.default_exec_timeout)
-
-  # Wipe out temp file
-  File(constants.hawq_sysctl_tmp_file, action='delete')
-
-
-def __update_sysctl_file_suse():
-  """
-  Updates /etc/sysctl.conf file with the HAWQ parameters on SUSE.
-  """
-  # Backup file
-  backup_file_name = constants.sysctl_backup_file.format(str(int(time.time())))
-  try:
-    # Generate file with kernel parameters needed by hawq to temp file
-    File(constants.hawq_sysctl_tmp_file, content=__convert_sysctl_dict_to_text(), owner=constants.hawq_user,
-        group=constants.hawq_group)
-
-    sysctl_file_dict = utils.read_file_to_dict(constants.sysctl_suse_file)
-    sysctl_file_dict_original = sysctl_file_dict.copy()
-    hawq_sysctl_dict = utils.read_file_to_dict(constants.hawq_sysctl_tmp_file)
-
-    # Merge common system file with hawq specific file
-    sysctl_file_dict.update(hawq_sysctl_dict)
-
-    if sysctl_file_dict_original != sysctl_file_dict:
-      # Backup file
-      Execute("cp {0} {1}".format(constants.sysctl_suse_file, backup_file_name), timeout=constants.default_exec_timeout)
-      # Write merged properties to file
-      utils.write_dict_to_file(sysctl_file_dict, constants.sysctl_suse_file)
-      # Reload kernel sysctl parameters from /etc/sysctl.conf
-      Execute("sysctl -e -p", timeout=constants.default_exec_timeout)
-
-  except Exception as e:
-    Logger.error("Error occurred while updating sysctl.conf file, reverting the contents" + str(e))
-    Execute("cp {0} {1}".format(constants.sysctl_suse_file, constants.hawq_sysctl_tmp_file))
-    Execute("mv {0} {1}".format(backup_file_name, constants.sysctl_suse_file), timeout=constants.default_exec_timeout)
-    Logger.error("Please execute `sysctl -e -p` on the command line manually to reload the contents of file {0}".format(
-      constants.hawq_sysctl_tmp_file))
-    raise Fail("Failed to update sysctl.conf file ")
-
-
-def get_local_hawq_site_property(property_name):
-  """
-  Fetches the value of the property specified, from the local hawq-site.xml.
-  """
-  hawq_site_path = None
-  try:
-    hawq_site_path = os.path.join(constants.hawq_config_dir, "hawq-site.xml")
-    hawq_site_root = ET.parse(hawq_site_path).getroot()
-    for property in hawq_site_root.findall("property"):
-      for item in property:
-        if item.tag == 'name':
-          current_property_name = item.text.strip() if item and item.text else item.text
-        elif item.tag == 'value':
-          current_property_value = item.text.strip() if item and item.text else item.text
-      if property_name == current_property_name:
-        return current_property_value
-    raise KeyError(property_name)  # property has not been found
-  except Exception:
-    raise Fail("Unable to read property {0} from local {1}".format(property_name, hawq_site_path))
-
-def validate_configuration():
-  """
-  Validates if YARN is present in the configuration when the user specifies YARN as HAWQ's resource manager.
-  """
-  import params
-
-  # At this point, hawq should be included.
-  if 'hawq-site' not in params.config['configurations']:
-    raise Fail("Configurations does not contain hawq-site. Please include HAWQ")
-
-  # If HAWQ is set to use YARN and YARN is not configured, error.
-  rm_type = params.config["configurations"]["hawq-site"].get("hawq_global_rm_type")
-  if rm_type == "yarn" and "yarn-site" not in params.config["configurations"]:
-    raise Fail("HAWQ is set to use YARN but YARN is not deployed. " + 
-               "hawq_global_rm_type property in hawq-site is set to 'yarn' but YARN is not configured. " + 
-               "Please deploy YARN before starting HAWQ or change the value of hawq_global_rm_type property to 'none'")

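A note on update_bashrc() above: its idempotency comes entirely from Execute's not_if guard, which skips the append whenever grep already finds the source line in the target file. The same append-once pattern, restated as a minimal self-contained sketch in plain Python (the paths are illustrative, and no resource_management machinery is involved):

# Append-once sketch mirroring update_bashrc()'s grep-guarded echo;
# the "if not present" check plays the role of Execute's not_if guard.
def append_source_line_once(source_file, target_file):
  source_line = "source {0}".format(source_file)
  with open(target_file, "a+") as fh:
    fh.seek(0)
    if not any(source_line in line for line in fh):
      fh.write(source_line + "\n")

append_source_line_once("/usr/local/hawq/greenplum_path.sh",
                        "/tmp/example_bashrc")
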
http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/constants.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/constants.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/constants.py
deleted file mode 100644
index b56fd59..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/constants.py
+++ /dev/null
@@ -1,61 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import os
-
-MASTER = "master"
-STANDBY = "standby"
-SEGMENT = "segment"
-START = "start"
-INIT = "init"
-STOP = "stop"
-
-# Users
-root_user = "root"
-hawq_user = "gpadmin"
-hawq_group = hawq_user
-hawq_password = hawq_user
-
-# Directories
-hawq_home_dir = "/usr/local/hawq/"
-hawq_config_dir = "/usr/local/hawq/etc/"
-hawq_pid_dir = "/var/run/hawq/"
-hawq_tmp_dir = '/tmp/hawq/'
-hawq_user_home_dir = os.path.expanduser("~{0}".format(hawq_user))
-limits_conf_dir = "/etc/security/limits.d"
-sysctl_conf_dir = "/etc/sysctl.d"
-
-# Files
-hawq_slaves_file = os.path.join(hawq_config_dir, "slaves")
-hawq_user_bashrc_file = os.path.join(hawq_user_home_dir, ".bashrc")
-hawq_greenplum_path_file = os.path.join(hawq_home_dir, "greenplum_path.sh")
-hawq_hosts_file = "/tmp/hawq_hosts"
-hawq_check_file = os.path.join(hawq_config_dir, "gpcheck.cnf")
-sysctl_suse_file = "/etc/sysctl.conf"
-sysctl_backup_file = "/etc/sysctl.conf.backup.{0}"
-hawq_sysctl_filename = "hawq_sysctl.conf"
-hawq_sysctl_tmp_file = os.path.join(hawq_tmp_dir, hawq_sysctl_filename)
-hawq_sysctl_file = os.path.join(sysctl_conf_dir, hawq_sysctl_filename)
-postmaster_opts_filename = "postmaster.opts"
-postmaster_pid_filename = "postmaster.pid"
-
-# Smoke check table
-smoke_check_table_name = "ambari_hawq_smoke_test"
-
-# Timeouts
-default_exec_timeout = 600
-hawq_operation_exec_timeout = 900

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
deleted file mode 100644
index 4443264..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from resource_management import Script
-from resource_management.libraries.functions.check_process_status import check_process_status
-
-import master_helper
-import common
-import constants
-
-class HawqMaster(Script):
-  """
-  Contains the interface definitions for methods like install, 
-  start, stop, status, etc. for the HAWQ Master
-  """
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    env.set_params(constants)
-    master_helper.configure_master()
-
-  def start(self, env):
-    self.configure(env)
-    common.validate_configuration()
-    master_helper.start_master()
-
-  def stop(self, env):
-    master_helper.stop_master()
-
-  def status(self, env):
-    from hawqstatus import get_pid_file
-    check_process_status(get_pid_file())
-
-if __name__ == "__main__":
-  HawqMaster().execute()

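The status() method above delegates to check_process_status(), which reads the pid file produced by hawqstatus.get_pid_file() and probes the process. A simplified stand-in for that behavior (not the resource_management implementation) looks roughly like this:

import os

# Simplified stand-in for check_process_status(): read the pid file and
# probe the process with signal 0; any failure means "not running".
def check_process_status_sketch(pid_file):
  try:
    with open(pid_file) as fh:
      pid = int(fh.readline().strip())
    os.kill(pid, 0)  # signal 0 only checks existence, it sends nothing
  except (IOError, ValueError, OSError):
    raise RuntimeError("No running process found via {0}".format(pid_file))
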
http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqsegment.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqsegment.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqsegment.py
deleted file mode 100644
index b4be502..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqsegment.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import os
-from resource_management import Script
-from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions.check_process_status import check_process_status
-
-import utils
-import common
-import constants
-
-class HawqSegment(Script):
-  """
-  Contains the interface definitions for methods like install, 
-  start, stop, status, etc. for the HAWQ Segment
-  """
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-
-    env.set_params(params)
-    env.set_params(constants)
-    common.setup_user()
-    common.setup_common_configurations()
-    common.update_bashrc(constants.hawq_greenplum_path_file, constants.hawq_user_bashrc_file)
-
-
-  def __start_segment(self):
-    import params
-    return utils.exec_hawq_operation(
-          constants.START, 
-          "{0} -a".format(constants.SEGMENT), 
-          not_if=utils.chk_postgres_status_cmd(params.hawq_segment_address_port))
-
-  def start(self, env):
-    self.configure(env)
-    common.validate_configuration()
-
-    if self.__is_segment_initialized():
-      self.__start_segment()
-      return
-
-    # Initialization also starts process.
-    self.__init_segment()
-
-
-  def stop(self, env):
-    import params
-
-    utils.exec_hawq_operation(constants.STOP, "{0} -a".format(constants.SEGMENT), only_if=utils.chk_postgres_status_cmd(
-                                params.hawq_segment_address_port))
-
-
-  def status(self, env):
-    from hawqstatus import get_pid_file
-    check_process_status(get_pid_file())
-
-
-  @staticmethod
-  def __init_segment():
-    import params
-
-    # Create segment directories
-    utils.create_dir_as_hawq_user(params.hawq_segment_dir)
-    utils.create_dir_as_hawq_user(params.hawq_segment_temp_dir.split(','))
-
-    Execute("chown {0}:{1} {2}".format(constants.hawq_user, constants.hawq_group, os.path.dirname(params.hawq_segment_dir)),
-            user=constants.root_user, timeout=constants.default_exec_timeout)
-
-    # Initialize hawq segment
-    utils.exec_hawq_operation(constants.INIT, "{0} -a -v".format(constants.SEGMENT))
-
-  def __is_segment_initialized(self):
-    """
-    Check whether the HAWQ Segment is initialized
-    """
-    import params
-    return os.path.exists(os.path.join(params.hawq_segment_dir, constants.postmaster_opts_filename))
-
-
-if __name__ == "__main__":
-  HawqSegment().execute()

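The start path above hinges on a single check: the presence of postmaster.opts in the segment data directory marks the segment as already initialized, so init (which also starts the process) runs only on first start. Restated as a self-contained sketch, with an illustrative directory:

import os

# Initialize-or-start decision used by HawqSegment.start(); the directory
# below is illustrative, the filename is the default from constants.py.
def segment_action(segment_dir, opts_filename="postmaster.opts"):
  if os.path.exists(os.path.join(segment_dir, opts_filename)):
    return "hawq start segment -a"  # already initialized: just start
  return "hawq init segment -a -v"  # first run: init also starts it

print(segment_action("/tmp/example_segment_dir"))
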
http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstandby.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstandby.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstandby.py
deleted file mode 100644
index d8254f9..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstandby.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-from resource_management import Script
-from resource_management.libraries.functions.check_process_status import check_process_status
-
-import master_helper
-import common
-import constants
-
-class HawqStandby(Script):
-  """
-  Contains the interface definitions for methods like install, 
-  start, stop, status, etc. for the HAWQ Standby Master
-  """
-
-  def install(self, env):
-    self.install_packages(env)
-    self.configure(env)
-
-  def configure(self, env):
-    import params
-    env.set_params(params)
-    env.set_params(constants)
-    master_helper.configure_master()
-
-  def start(self, env):
-    self.configure(env)
-    common.validate_configuration()
-    master_helper.start_master()
-
-  def stop(self, env):
-    master_helper.stop_master()
-
-  def status(self, env):
-    from hawqstatus import get_pid_file
-    check_process_status(get_pid_file())
-
-  def activatestandby(self, env):
-    pass
-
-if __name__ == "__main__":
-    HawqStandby().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstatus.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstatus.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstatus.py
deleted file mode 100644
index 26dfdd0..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqstatus.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import os
-
-from resource_management import Script
-from resource_management.core.resources.system import File
-from resource_management.core.exceptions import Fail
-
-import utils
-import common
-import constants
-
-
-def get_pid_file():
-  """
-  Fetches the pid file, which will be used to get the status of the HAWQ Master, Standby
-  or Segments
-  """
-
-  config = Script.get_config()
-  
-  component_name = config['componentName']
-  component = "master" if component_name in ["HAWQMASTER", "HAWQSTANDBY"] else "segment"
-  hawq_pid_file = os.path.join(constants.hawq_pid_dir, "hawq-{0}.pid".format(component))
-
-  File(hawq_pid_file, action='delete')
-  utils.create_dir_as_hawq_user(constants.hawq_pid_dir)
-
-  # Get hawq_master_directory or hawq_segment_directory value from
-  # hawq-site.xml depending on the component
-  hawq_site_directory_property = "hawq_{0}_directory".format(component)
-  
-  # hawq-site content from the Ambari server is not available when the
-  # command type is STATUS_COMMAND, so read it directly from the local file
-  postmaster_pid_file = os.path.join(common.get_local_hawq_site_property(
-      hawq_site_directory_property), constants.postmaster_pid_filename)
-
-  pid = ""
-  if os.path.exists(postmaster_pid_file):
-    with open(postmaster_pid_file, 'r') as fh:
-      pid = fh.readline().strip()
-
-  if not pid:
-    raise Fail("Failed to fetch pid from {0}".format(postmaster_pid_file))
-
-  File(hawq_pid_file, content=pid, owner=constants.hawq_user, group=constants.hawq_group)
-
-  return hawq_pid_file

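One detail worth calling out in get_pid_file() above: the first line of PostgreSQL's postmaster.pid carries the postmaster's pid, which is all the status check needs. A minimal extraction sketch:

import os

# Reads the pid from the first line of a postmaster.pid file, mirroring
# the core step of get_pid_file(); raises if the line is empty.
def read_postmaster_pid(data_dir, pid_filename="postmaster.pid"):
  pid_path = os.path.join(data_dir, pid_filename)
  with open(pid_path, "r") as fh:
    pid = fh.readline().strip()
  if not pid:
    raise RuntimeError("Failed to fetch pid from {0}".format(pid_path))
  return pid
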
http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/master_helper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/master_helper.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/master_helper.py
deleted file mode 100644
index 35f5112..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/master_helper.py
+++ /dev/null
@@ -1,194 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import os
-from resource_management.core.resources.system import File, Execute
-from resource_management.core.source import Template
-from resource_management.core.exceptions import Fail
-from resource_management.core.logger import Logger
-
-import utils
-import common
-import constants
-
-def __setup_master_specific_conf_files():
-  """
-  Sets up config files only applicable for HAWQ Master and Standby nodes
-  """
-  import params
-
-  File(constants.hawq_check_file, content=params.gpcheck_content, owner=constants.hawq_user, group=constants.hawq_group,
-      mode=0644)
-
-  File(constants.hawq_slaves_file, content=Template("slaves.j2"), owner=constants.hawq_user, group=constants.hawq_group,
-       mode=0644)
-
-  File(constants.hawq_hosts_file, content=Template("hawq-hosts.j2"), owner=constants.hawq_user, group=constants.hawq_group,
-       mode=0644)
-
-
-def __setup_passwordless_ssh():
-  """
-  Exchanges ssh keys to setup passwordless ssh for the hawq_user between the HAWQ Master and the HAWQ Segment nodes
-  """
-  utils.exec_hawq_operation("ssh-exkeys", "-f {0} -p {1}".format(constants.hawq_hosts_file, constants.hawq_password))
-
-  File(constants.hawq_hosts_file, action='delete')
-
-
-def __setup_hawq_user_profile():
-  """
-  Sets up the ENV variables for hawq_user as a convenience for the command line users
-  """
-  hawq_profile_file = os.path.join(os.path.expanduser("~{0}".format(constants.hawq_user)), ".hawq-profile.sh")
-  File(hawq_profile_file, content=Template("hawq-profile.sh.j2"), owner=constants.hawq_user, group=constants.hawq_group)
-  common.update_bashrc(hawq_profile_file, constants.hawq_user_bashrc_file)
-
-
-def configure_master():
-  """
-  Configures the master node after rpm install
-  """
-  common.setup_user()
-  common.setup_common_configurations()
-  __setup_master_specific_conf_files()
-  __setup_passwordless_ssh()
-  __setup_hawq_user_profile()
-  __create_local_dirs()
-
-
-def __create_local_dirs():
-  """
-  Creates the required local directories for HAWQ 
-  """
-  import params
-  # Create Master directories
-  utils.create_dir_as_hawq_user(params.hawq_master_dir)
-  utils.create_dir_as_hawq_user(params.hawq_master_temp_dir.split(','))
-
-  Execute("chown {0}:{1} {2}".format(constants.hawq_user, constants.hawq_group, os.path.dirname(params.hawq_master_dir)),
-          user=constants.root_user, timeout=constants.default_exec_timeout)
-
-  Execute("chmod 700 {0}".format(params.hawq_master_dir), user=constants.root_user, timeout=constants.default_exec_timeout)
-
-
-def __create_hdfs_dirs():
-  """
-  Creates the required HDFS directories for HAWQ
-  """
-  import params
-  params.HdfsResource(params.hawq_hdfs_data_dir, type="directory", action="create_on_execute", owner=constants.hawq_user, group=constants.hawq_group, mode=0755)
-  params.HdfsResource(None, action="execute")
-
-
-def __init_active():
-  """
-  Initializes the active master
-  """
-  __create_hdfs_dirs()
-  utils.exec_hawq_operation(constants.INIT, "{0} -a -v".format(constants.MASTER))
-
-
-def __init_standby():
-  """
-  Initializes the HAWQ Standby Master
-  """
-  utils.exec_hawq_operation(constants.INIT, "{0} -a -v".format(constants.STANDBY))
-
-
-def __get_component_name():
-  """
-  Identifies current node as either HAWQ Master or HAWQ Standby Master
-  """
-  return constants.MASTER if __is_active_master() else constants.STANDBY
-
-
-def __start_local_master():
-  """
-  Starts HAWQ Master or HAWQ Standby Master component on the host
-  """
-  import params
-  utils.exec_hawq_operation(
-        constants.START, 
-        "{0} -a".format(__get_component_name()), 
-        not_if=utils.chk_postgres_status_cmd(params.hawq_master_address_port))
-
-  
-def __is_local_initialized():
-  """
-  Checks if the local node has been initialized
-  """
-  import params
-  return os.path.exists(os.path.join(params.hawq_master_dir, constants.postmaster_opts_filename))
-
-
-def __get_standby_host():
-  """
-  Returns the name of the HAWQ Standby Master host from hawq-site.xml, or None if no standby is configured
-  """
-  standby_host = common.get_local_hawq_site_property("hawq_standby_address_host")
-  return None if standby_host is None or standby_host.lower() == 'none' else standby_host
-
-
-def __is_standby_initialized():
-  """
-  Returns True if HAWQ Standby Master is initialized, False otherwise
-  """
-  import params
-  
-  file_path = os.path.join(params.hawq_master_dir, constants.postmaster_opts_filename)
-  (retcode, _, _) = utils.exec_ssh_cmd(__get_standby_host(), "[ -f {0} ]".format(file_path))
-  return retcode == 0
-
-
-def start_master():
-  """
-  Initializes HAWQ Master/Standby if not already done and starts them
-  """
-  import params
-
-  if params.hostname not in [params.hawqmaster_host, params.hawqstandby_host]:
-    raise Fail("Host should be either the active HAWQ Master or the HAWQ Standby Master.")
-
-  is_active_master = __is_active_master()
-  if __is_local_initialized():
-    __start_local_master()
-
-  elif is_active_master:
-    __init_active()
-
-  if is_active_master and __get_standby_host() is not None and not __is_standby_initialized():
-    __init_standby()
-
-
-def stop_master():
-  """
-  Stops the HAWQ Master/Standby
-  """
-  import params
-  utils.exec_hawq_operation(
-                constants.STOP,
-                "{0} -a".format(__get_component_name()),
-                only_if=utils.chk_postgres_status_cmd(params.hawq_master_address_port))
-
-
-def __is_active_master():
-  """
-  Finds if this node is the active master
-  """
-  import params
-  return params.hostname == common.get_local_hawq_site_property("hawq_master_address_host")

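The control flow of start_master() above is compact but easy to misread: a locally initialized node just starts, an uninitialized active master runs init (which also starts it), and the active master additionally initializes a configured-but-uninitialized standby. The same flow with boolean stand-ins for the private helpers:

# Boolean arguments stand in for __is_local_initialized(), __is_active_master(),
# __get_standby_host() and __is_standby_initialized() from master_helper.py.
def start_master_actions(local_initialized, is_active,
                         standby_host, standby_initialized):
  actions = []
  if local_initialized:
    actions.append("start local master")  # __start_local_master()
  elif is_active:
    actions.append("init active master")  # __init_active(), also starts it
  if is_active and standby_host and not standby_initialized:
    actions.append("init standby")        # __init_standby()
  return actions

# Fresh cluster, seen from the active master with a standby configured:
assert start_master_actions(False, True, "standby.example.com", False) == \
       ["init active master", "init standby"]
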
http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
deleted file mode 100644
index fb449b9..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ /dev/null
@@ -1,92 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-import functools
-from resource_management import Script
-from resource_management.libraries.functions.default import default
-from resource_management.libraries.resources.hdfs_resource import HdfsResource
-
-config = Script.get_config()
-
-
-def __get_component_host(component):
-  """
-  Returns the first host where the given component is deployed, None if the component is not deployed
-  """
-  component_host = None
-  if component in config['clusterHostInfo'] and len(config['clusterHostInfo'][component]) > 0:
-    component_host = config['clusterHostInfo'][component][0]
-  return component_host
-
-
-def __get_namenode_host():
-  """
-  Gets the namenode host; active namenode in case of HA
-  """
-  namenode_host = __get_component_host('namenode_host')
-  
-  # hostname of the active HDFS HA Namenode (only used when HA is enabled)
-  dfs_ha_namenode_active = default('/configurations/hadoop-env/dfs_ha_initial_namenode_active', None)
-  if dfs_ha_namenode_active is not None:
-    namenode_host = dfs_ha_namenode_active
-  return namenode_host
-
-
-hostname = config['hostname']
-
-# Users and Groups
-hdfs_superuser = config['configurations']['hadoop-env']['hdfs_user']
-user_group = config['configurations']['cluster-env']['user_group']
-
-# HAWQ Hostnames
-hawqmaster_host = __get_component_host('hawqmaster_hosts')
-hawqstandby_host = __get_component_host('hawqstandby_hosts')
-hawqsegment_hosts = default('/clusterHostInfo/hawqsegment_hosts', [])
-
-# HDFS
-hdfs_site = config['configurations']['hdfs-site']
-default_fs = config['configurations']['core-site']['fs.defaultFS']
-
-# HDFSResource partial function
-HdfsResource = functools.partial(HdfsResource, user=hdfs_superuser, hdfs_site=hdfs_site, default_fs=default_fs)
-
-namenode_host = __get_namenode_host()
-
-# YARN
-# Note: YARN is not mandatory for HAWQ. It is required only when the users set HAWQ to use YARN as resource manager
-rm_host = __get_component_host('rm_host')
-
-# Config files
-gpcheck_content = config['configurations']['gpcheck-env']['content']
-# database user limits
-hawq_limits = config['configurations']['hawq-limits-env']
-# sysctl parameters
-hawq_sysctl = config['configurations']['hawq-sysctl-env']
-
-hawq_site = config['configurations']['hawq-site']
-hawq_master_dir = hawq_site.get('hawq_master_directory')
-hawq_segment_dir = hawq_site.get('hawq_segment_directory')
-hawq_master_temp_dir = hawq_site.get('hawq_master_temp_directory')
-hawq_segment_temp_dir = hawq_site.get('hawq_segment_temp_directory')
-# Extract hawq hdfs directory from hdfs url. Ex: /hawq/hawq_default from
-# host:8080/hawq/hawq_default
-hawq_hdfs_data_dir = "/{0}".format(hawq_site.get('hawq_dfs_url').split('/', 1)[1])
-hawq_master_address_port = hawq_site.get('hawq_master_address_port')
-hawq_segment_address_port = hawq_site.get('hawq_segment_address_port')
-
-

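The hawq_hdfs_data_dir extraction at the end of params.py is terse enough to deserve a worked example: everything after the first '/' in hawq_dfs_url is the HDFS path, and a leading '/' is re-attached. The URL value below is illustrative:

hawq_dfs_url = "namenode.example.com:8020/hawq/hawq_default"  # illustrative
hawq_hdfs_data_dir = "/{0}".format(hawq_dfs_url.split('/', 1)[1])
assert hawq_hdfs_data_dir == "/hawq/hawq_default"
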
http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/service_check.py
deleted file mode 100644
index 7a99f5e..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/service_check.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import common
-import constants
-from utils import exec_psql_cmd, exec_ssh_cmd
-from resource_management.libraries.script import Script
-from resource_management.core.exceptions import Fail
-from resource_management.core.logger import Logger
-
-import sys
-
-class HAWQServiceCheck(Script):
-  """
-  Runs a set of simple HAWQ tests to verify if the service has been setup correctly
-  """
-
-  def __init__(self):
-    self.active_master_host = common.get_local_hawq_site_property("hawq_master_address_host")
-
-
-  def service_check(self, env):
-    Logger.info("Starting HAWQ service checks..")
-    # All the tests are run on the active_master_host using ssh irrespective of the node on which service check
-    # is executed by Ambari
-    try:
-      self.check_state()
-      self.drop_table()
-      self.create_table()
-      self.insert_data()
-      self.query_data()
-      self.check_data_correctness()
-    except Exception:
-      Logger.error("Service check failed")
-      sys.exit(1)
-    finally:
-      self.drop_table()
-
-    Logger.info("Service check completed successfully")
-
-
-  def drop_table(self):
-    Logger.info("Dropping {0} table if exists".format(constants.smoke_check_table_name))
-    sql_cmd = "drop table if exists {0}".format(constants.smoke_check_table_name)
-    exec_psql_cmd(sql_cmd, self.active_master_host)
-
-
-  def create_table(self):
-    Logger.info("Creating table {0}".format(constants.smoke_check_table_name))
-    sql_cmd = "create table {0} (col1 int) distributed randomly".format(constants.smoke_check_table_name)
-    exec_psql_cmd(sql_cmd, self.active_master_host)
-
-
-  def insert_data(self):
-    Logger.info("Inserting data to table {0}".format(constants.smoke_check_table_name))
-    sql_cmd = "insert into {0} select * from generate_series(1,10)".format(constants.smoke_check_table_name)
-    exec_psql_cmd(sql_cmd, self.active_master_host)
-
-
-  def query_data(self):
-    Logger.info("Querying data from table {0}".format(constants.smoke_check_table_name))
-    sql_cmd = "select * from {0}".format(constants.smoke_check_table_name)
-    exec_psql_cmd(sql_cmd, self.active_master_host)
-
-
-  def check_data_correctness(self):
-    expected_data = "55"
-    Logger.info("Validating data inserted, finding sum of all the inserted entries. Expected output: {0}".format(expected_data))
-    sql_cmd = "select sum(col1) from {0}".format(constants.smoke_check_table_name)
-    _, stdout, _ = exec_psql_cmd(sql_cmd, self.active_master_host, tuples_only=False)
-    if expected_data != stdout.strip():
-      Logger.error("Incorrect data returned. Expected Data: {0} Actual Data: {1}".format(expected_data, stdout))
-      raise Fail("Incorrect data returned.")
-
-
-  def check_state(self):
-    import params
-    command = "source {0} && hawq state -d {1}".format(constants.hawq_greenplum_path_file, params.hawq_master_dir)
-    Logger.info("Executing hawq status check..")
-    (retcode, out, err) = exec_ssh_cmd(self.active_master_host, command)
-    if retcode:
-      Logger.error("hawq state command returned non-zero result: {0}. Out: {1} Error: {2}".format(retcode, out, err))
-      raise Fail("Unexpected result of hawq state command.")
-    Logger.info("Output of command:\n{0}".format(str(out) + "\n"))
-
-
-if __name__ == "__main__":
-  HAWQServiceCheck().execute()

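The expected value in check_data_correctness() above follows directly from the inserted data: generate_series(1,10) yields the integers 1 through 10, whose sum is 10 * 11 / 2 = 55. The same arithmetic in Python:

# The smoke test inserts 1..10 and expects their sum back from HAWQ.
assert sum(range(1, 11)) == 10 * 11 // 2 == 55
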
http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
deleted file mode 100644
index da51c19..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/utils.py
+++ /dev/null
@@ -1,108 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-import subprocess
-
-from resource_management.core.resources.system import Execute, Directory
-from resource_management.core.exceptions import Fail
-from resource_management.core.logger import Logger
-
-import constants
-
-def chk_postgres_status_cmd(port):
-  """
-  Checks if the postgres port is occupied
-  """
-  return "netstat -tupln | egrep ':{0}\s' | egrep postgres".format(port)
-
-
-def create_dir_as_hawq_user(directory):
-  """
-  Creates directories with hawq_user and hawq_group (defaults to gpadmin:gpadmin)
-  """
-  Directory(directory, recursive=True, owner=constants.hawq_user, group=constants.hawq_group)
-
-
-def exec_hawq_operation(operation, option, not_if=None, only_if=None, logoutput=True):
-  """
-  Sets up execution environment and runs a given command as HAWQ user
-  """
-  hawq_cmd = "source {0} && hawq {1} {2}".format(constants.hawq_greenplum_path_file, operation, option)
-  Execute(
-        hawq_cmd,
-        user=constants.hawq_user,
-        timeout=constants.hawq_operation_exec_timeout,
-        not_if=not_if,
-        only_if=only_if,
-        logoutput=logoutput)
-
-
-def read_file_to_dict(file_name):
-  """ 
-  Converts a file with key=value format to dictionary
-  """
-  with open(file_name, "r") as fh:
-    lines = fh.readlines()
-    lines = [item for item in lines if '=' in item]
-    result_dict = dict(item.split("=") for item in lines)
-  return result_dict
-
-
-def write_dict_to_file(source_dict, dest_file):
-  """
-  Writes a dictionary into a file with key=value format
-  """
-  with open(dest_file, "w") as fh:
-    for property_key, property_value in source_dict.items():
-      if property_value is None:
-        fh.write(property_key + "\n")
-      else:
-        fh.write("{0}={1}\n".format(property_key, property_value))
-
-
-def exec_ssh_cmd(hostname, cmd):
-  """
-  Runs the command on the remote host as gpadmin user
-  """
-  import params
-  # Only gpadmin should be allowed to run command via ssh, thus not exposing user as a parameter
-  if params.hostname != hostname:
-    cmd = "su - {0} -c 'ssh -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null {1} \"{2} \" '".format(constants.hawq_user, hostname, cmd)
-  else:
-    cmd = "su - {0} -c \"{1}\"".format(constants.hawq_user, cmd)
-  Logger.info("Command executed: {0}".format(cmd))
-  process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
-  (stdout, stderr) = process.communicate()
-  return process.returncode, stdout, stderr
-
-
-def exec_psql_cmd(command, host, db="template1", tuples_only=True):
-  """
-  Sets up execution environment and runs the HAWQ queries
-  """
-  src_cmd = "source {0}".format(constants.hawq_greenplum_path_file)
-  if tuples_only:
-    cmd = src_cmd + " && psql -t -d {0} -c \\\"{1};\\\"".format(db, command)
-  else:
-    cmd = src_cmd + " && psql -d {0} -c \\\"{1};\\\"".format(db, command)
-  retcode, out, err = exec_ssh_cmd(host, cmd)
-  if retcode:
-    Logger.error("SQL command executed failed: {0}\nReturncode: {1}\nStdout: {2}\nStderr: {3}".format(cmd, retcode, out, err))
-    raise Fail("SQL command executed failed.")
-
-  Logger.info("Output:\n{0}".format(out))
-  return retcode, out, err

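read_file_to_dict() and write_dict_to_file() above implement the key=value round trip that common.py uses for the SUSE sysctl merge. A self-contained illustration of that merge on in-memory data (keys and values are made up, and no /etc/sysctl.conf is touched):

# Parse key=value lines, merge HAWQ overrides, and re-emit; mirrors the
# read_file_to_dict -> update -> write_dict_to_file sequence.
system_lines = ["kernel.shmmax=500000000\n", "vm.swappiness=60\n"]
sysctl = dict(line.strip().split("=", 1) for line in system_lines if "=" in line)
sysctl.update({"vm.swappiness": "10"})  # HAWQ-specific override wins
for key, value in sorted(sysctl.items()):
  print("{0}={1}".format(key, value))
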
http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/hawq-hosts.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/hawq-hosts.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/hawq-hosts.j2
deleted file mode 100644
index 9bdaa75..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/hawq-hosts.j2
+++ /dev/null
@@ -1,5 +0,0 @@
-{{hawqmaster_host}}
-{{hawqstandby_host}}
-{% for host in hawqsegment_hosts %}
-{{host}}
-{% endfor %}

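hawq-hosts.j2 above simply emits the master, the standby, and each segment host on its own line. Ambari binds these variables from params.py at render time; outside Ambari, the template can be exercised with the jinja2 package directly (hostnames below are made up):

from jinja2 import Template

template = Template("{{hawqmaster_host}}\n{{hawqstandby_host}}\n"
                    "{% for host in hawqsegment_hosts %}{{host}}\n{% endfor %}")
print(template.render(hawqmaster_host="master1.example.com",
                      hawqstandby_host="master2.example.com",
                      hawqsegment_hosts=["seg1.example.com", "seg2.example.com"]))
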
http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/hawq-profile.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/hawq-profile.sh.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/hawq-profile.sh.j2
deleted file mode 100644
index e28e416..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/hawq-profile.sh.j2
+++ /dev/null
@@ -1,8 +0,0 @@
-######################################
-##### HAWQ env #######################
-######################################
-source {{hawq_greenplum_path_file}}
-export MASTER_DATA_DIRECTORY={{hawq_master_dir}}
-export PGPORT={{hawq_master_address_port}}
-export PGHOST={{hawqmaster_host}}
-export PGUSER={{hawq_user}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/slaves.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/slaves.j2 b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/slaves.j2
deleted file mode 100644
index 591d92b..0000000
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/templates/slaves.j2
+++ /dev/null
@@ -1,3 +0,0 @@
-{% for host in hawqsegment_hosts %}
-{{host}}
-{% endfor %}

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/configuration/pxf-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/configuration/pxf-site.xml b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/configuration/pxf-site.xml
new file mode 100644
index 0000000..0b3a36e
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/configuration/pxf-site.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<configuration> 
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/metainfo.xml b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/metainfo.xml
new file mode 100644
index 0000000..f578d64
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/metainfo.xml
@@ -0,0 +1,71 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>PXF</name>
+      <displayName>PXF</displayName>
+      <comment>HAWQ Extension Framework</comment>
+      <version>3.0</version>
+      <components>
+
+        <component>
+          <name>PXF</name>
+          <displayName>PXF</displayName>
+          <category>SLAVE</category>
+          <cardinality>1+</cardinality>
+          <commandScript>
+            <script>scripts/pxf.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+      </components>
+
+      <requiredServices>
+         <service>HAWQ</service>
+      </requiredServices>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>pxf-service</name>
+            </package>
+            <package>
+              <name>apache-tomcat</name>
+            </package>
+            <package>
+              <name>pxf-hive</name>
+            </package>
+            <package>
+              <name>pxf-hdfs</name>
+            </package>
+            <package>
+              <name>pxf-hbase</name>
+            </package>
+          </packages>
+        </osSpecific>
+      </osSpecifics>
+
+    </service>
+  </services>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/params.py
new file mode 100644
index 0000000..a4986c9
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/params.py
@@ -0,0 +1,42 @@
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from resource_management import Script
+
+config = Script.get_config()
+
+
+pxf_service_name = "pxf-service"
+stack_name = str(config["hostLevelParams"]["stack_name"])
+
+# Users and Groups
+pxf_user = "pxf"
+pxf_group = pxf_user
+hdfs_superuser_group = config["configurations"]["hdfs-site"]["dfs.permissions.superusergroup"]
+user_group = config["configurations"]["cluster-env"]["user_group"]
+tomcat_group = "tomcat"
+
+# Directories
+pxf_conf_dir = "/etc/pxf/conf"
+pxf_instance_dir = "/var/pxf"
+
+# Java home path
+java_home = config["hostLevelParams"]["java_home"] if "java_home" in config["hostLevelParams"] else None
+
+# Timeouts
+default_exec_timeout = 600

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/pxf.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/pxf.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/pxf.py
new file mode 100644
index 0000000..dd0031c
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/scripts/pxf.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from resource_management import Script
+from resource_management.core.exceptions import ComponentIsNotRunning
+from resource_management.libraries.resources.xml_config import XmlConfig
+from resource_management.core.resources.accounts import User
+from resource_management.core.resources.system import Directory, File, Execute
+from resource_management.core.source import Template
+
+
+
+class Pxf(Script):
+  """
+  Implements the install, start, stop, status and configure
+  commands for the PXF service.
+  """
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+    self.__setup_user_group()
+    self.__generate_config_files()
+    # pxf-service init exits safely when it is already initialized
+    self.__execute_service_command("init")
+
+
+  def start(self, env):
+    self.configure(env)
+    self.__grant_permissions()
+    self.__execute_service_command("restart")
+
+
+  def stop(self, env):
+    self.__execute_service_command("stop")
+
+
+  def status(self, env):
+    try:
+      self.__execute_service_command("status")
+    except Exception:
+      raise ComponentIsNotRunning()
+
+
+  def __execute_service_command(self, command):
+    import params
+    Execute("service {0} {1}".format(params.pxf_service_name, command),
+              timeout=params.default_exec_timeout,
+              logoutput=True)
+
+
+  def __setup_user_group(self):
+    """
+    Creates the PXF user with the required groups and bash as the default shell
+    """
+    import params
+    User(params.pxf_user,
+         groups=[params.hdfs_superuser_group, params.user_group, params.tomcat_group],
+         shell="/bin/bash")
+
+
+  def __generate_config_files(self):
+    """
+    Generates the pxf-env.sh file from a Jinja template and sets the classpath when the stack is HDP
+    """
+    import params
+    import shutil
+
+    hdp_stack = "HDP"
+
+    # Create file pxf-env.sh from jinja template
+    File("{0}/pxf-env.sh".format(params.pxf_conf_dir),
+         content=Template("pxf-env.j2"))
+
+    # Classpath is set for PHD by default. If stack is HDP, set classpath for HDP
+    if params.stack_name == hdp_stack:
+      shutil.copy2("{0}/pxf-privatehdp.classpath".format(params.pxf_conf_dir),
+                   "{0}/pxf-private.classpath".format(params.pxf_conf_dir))
+
+    XmlConfig("pxf-site.xml",
+              conf_dir=params.pxf_conf_dir,
+              configurations=params.config['configurations']['pxf-site'],
+              configuration_attributes=params.config['configuration_attributes']['pxf-site'])
+
+
+  def __grant_permissions(self):
+    """
+    Grants ownership of the PXF instance directory to pxf:pxf
+    """
+    import params
+    Directory(params.pxf_instance_dir,
+              owner=params.pxf_user,
+              group=params.pxf_group,
+              recursive=True)
+
+
+if __name__ == "__main__":
+  Pxf().execute()
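
A minimal, standalone sketch of the command string that
__execute_service_command() above hands to the Execute resource;
pxf_service_name comes from params.py. This mirrors only the format() call
and does not run any service command.

    pxf_service_name = "pxf-service"

    def service_command(command):
        # Same string construction as the Execute resource receives.
        return "service {0} {1}".format(pxf_service_name, command)

    assert service_command("init") == "service pxf-service init"
    assert service_command("restart") == "service pxf-service restart"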

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/templates/pxf-env.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/templates/pxf-env.j2 b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/templates/pxf-env.j2
new file mode 100644
index 0000000..03f2420
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0.0/package/templates/pxf-env.j2
@@ -0,0 +1,34 @@
+#!/bin/sh
+
+#Licensed to the Apache Software Foundation (ASF) under one
+#or more contributor license agreements.  See the NOTICE file
+#distributed with this work for additional information
+#regarding copyright ownership.  The ASF licenses this file
+#to you under the Apache License, Version 2.0 (the
+#"License"); you may not use this file except in compliance
+#with the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#Unless required by applicable law or agreed to in writing, software
+#distributed under the License is distributed on an "AS IS" BASIS,
+#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#See the License for the specific language governing permissions and
+#limitations under the License.
+
+
+# THIS FILE SHOULD BE KEPT IN SYNC WITH https://github.com/apache/incubator-hawq/blob/master/pxf/pxf-service/src/scripts/pxf-env.sh
+
+# Path to HDFS native libraries
+export LD_LIBRARY_PATH=/usr/lib/hadoop/lib/native:${LD_LIBRARY_PATH}
+
+# Path to JAVA
+export JAVA_HOME={{java_home}}
+
+# Path to Log directory
+export PXF_LOGDIR=/var/log/pxf
+export CATALINA_OUT=${PXF_LOGDIR}/catalina.out
+
+# Path to Run directory
+export PXF_RUNDIR=/var/run/pxf
+
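
A minimal sketch of how the {{java_home}} placeholder above gets filled in.
The script renders this file through resource_management's Template resource,
which picks up values from params.py; plain jinja2 is used here only so the
example runs standalone, and the java_home value is hypothetical.

    from jinja2 import Template

    line = Template("export JAVA_HOME={{java_home}}")
    print(line.render(java_home="/usr/java/default"))
    # export JAVA_HOME=/usr/java/default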

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/PXF/3.0.0/configuration/pxf-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/configuration/pxf-site.xml b/ambari-server/src/main/resources/common-services/PXF/3.0.0/configuration/pxf-site.xml
deleted file mode 100644
index 0b3a36e..0000000
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/configuration/pxf-site.xml
+++ /dev/null
@@ -1,19 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<configuration> 
-</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/PXF/3.0.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/metainfo.xml b/ambari-server/src/main/resources/common-services/PXF/3.0.0/metainfo.xml
deleted file mode 100644
index f578d64..0000000
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/metainfo.xml
+++ /dev/null
@@ -1,71 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<metainfo>
-  <schemaVersion>2.0</schemaVersion>
-  <services>
-    <service>
-      <name>PXF</name>
-      <displayName>PXF</displayName>
-      <comment>HAWQ Extension Framework</comment>
-      <version>3.0</version>
-      <components>
-
-        <component>
-          <name>PXF</name>
-          <displayName>PXF</displayName>
-          <category>SLAVE</category>
-          <cardinality>1+</cardinality>
-          <commandScript>
-            <script>scripts/pxf.py</script>
-            <scriptType>PYTHON</scriptType>
-            <timeout>600</timeout>
-          </commandScript>
-        </component>
-      </components>
-
-      <requiredServices>
-         <service>HAWQ</service>
-      </requiredServices>
-
-      <osSpecifics>
-        <osSpecific>
-          <osFamily>any</osFamily>
-           <packages>
-             <package>
-               <name>pxf-service</name>
-             </package>
-             <package>
-               <name>apache-tomcat</name>
-             </package>
-             <package>
-               <name>pxf-hive</name>
-             </package>
-             <package>
-               <name>pxf-hdfs</name>
-             </package>
-             <package>
-               <name>pxf-hbase</name>
-             </package>
-          </packages>
-        </osSpecific>
-      </osSpecifics>
-
-    </service>
-  </services>
-</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/810a32ab/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
deleted file mode 100644
index a4986c9..0000000
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-"""
-
-from resource_management import Script
-
-config = Script.get_config()
-
-
-pxf_service_name = "pxf-service"
-stack_name = str(config["hostLevelParams"]["stack_name"])
-
-# Users and Groups
-pxf_user = "pxf"
-pxf_group = pxf_user
-hdfs_superuser_group = config["configurations"]["hdfs-site"]["dfs.permissions.superusergroup"]
-user_group = config["configurations"]["cluster-env"]["user_group"]
-tomcat_group = "tomcat"
-
-# Directories
-pxf_conf_dir = "/etc/pxf/conf"
-pxf_instance_dir = "/var/pxf"
-
-# Java home path
-java_home = config["hostLevelParams"]["java_home"] if "java_home" in config["hostLevelParams"] else None
-
-# Timeouts
-default_exec_timeout = 600