Posted to commits@ambari.apache.org by dm...@apache.org on 2014/02/18 22:06:32 UTC

[1/2] AMBARI-4481. Add to the agent ability to download service scripts and hooks (dlysnichenko)

Repository: ambari
Updated Branches:
  refs/heads/trunk f2c18bcd7 -> 02f9c4531


http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/python/TestResourceFilesKeeper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestResourceFilesKeeper.py b/ambari-server/src/test/python/TestResourceFilesKeeper.py
new file mode 100644
index 0000000..6ec8436
--- /dev/null
+++ b/ambari-server/src/test/python/TestResourceFilesKeeper.py
@@ -0,0 +1,319 @@
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+import time
+import subprocess
+import os
+import logging
+import tempfile
+import pprint
+from xml.dom import minidom
+
+from unittest import TestCase
+from subprocess import Popen
+from mock.mock import MagicMock, call
+from mock.mock import patch
+from mock.mock import create_autospec
+from ambari_server.resourceFilesKeeper import ResourceFilesKeeper, KeeperException
+
+
+class TestResourceFilesKeeper(TestCase):
+
+  TEST_STACKS_DIR="../resources/stacks"
+
+  # Stack that is not expected to change
+  DUMMY_UNCHANGEABLE_STACK="../resources/TestAmbaryServer.samples/" \
+                           "dummy_stack/HIVE/"
+
+  DUMMY_ACTIVE_STACK="../resources/TestAmbaryServer.samples/" \
+                           "active_stack/"
+
+  DUMMY_INACTIVE_STACK="../resources/TestAmbaryServer.samples/" \
+                     "inactive_stack/"
+
+  DUMMY_UNCHANGEABLE_PACKAGE=os.path.join(DUMMY_UNCHANGEABLE_STACK,
+                                    ResourceFilesKeeper.PACKAGE_DIR)
+
+  DUMMY_UNCHANGEABLE_PACKAGE_HASH="4cd211c4d16b49b3546ecc9b7996698105fe10e4"
+  DUMMY_HASH="dummy_hash"
+  YA_HASH="yet_another_hash"
+  SOME_PATH="some-path"
+
+  def setUp(self):
+    logging.basicConfig(level=logging.ERROR)
+
+
+  @patch.object(ResourceFilesKeeper, "update_directory_archieves")
+  def test_perform_housekeeping(self, update_directory_archieves_mock):
+    resource_files_keeper = ResourceFilesKeeper("/dummy-path")
+    resource_files_keeper.perform_housekeeping()
+    self.assertTrue(update_directory_archieves_mock.called)
+
+
+  @patch.object(ResourceFilesKeeper, "update_directory_archive")
+  @patch.object(ResourceFilesKeeper, "list_active_stacks")
+  @patch("os.path.abspath")
+  def test_update_directory_archieves(self, abspath_mock,
+                                      list_active_stacks_mock,
+                                      update_directory_archive_mock):
+    list_active_stacks_mock.return_value = [self.DUMMY_UNCHANGEABLE_STACK,
+                                            self.DUMMY_UNCHANGEABLE_STACK,
+                                            self.DUMMY_UNCHANGEABLE_STACK]
+    abspath_mock.side_effect = lambda s: s
+    resource_files_keeper = ResourceFilesKeeper(self.TEST_STACKS_DIR)
+    resource_files_keeper.update_directory_archieves()
+    self.assertEquals(pprint.pformat(
+      update_directory_archive_mock.call_args_list),
+            "[call('../resources/TestAmbaryServer.samples/"
+            "dummy_stack/HIVE/package'),\n "
+            "call('../resources/TestAmbaryServer.samples/"
+            "dummy_stack/HIVE/package'),\n "
+            "call('../resources/TestAmbaryServer.samples/"
+            "dummy_stack/HIVE/package'),\n "
+            "call('../resources/stacks/custom_actions')]")
+
+
+
+  @patch("glob.glob")
+  @patch("os.path.exists")
+  @patch.object(ResourceFilesKeeper, "is_active_stack")
+  def test_list_active_stacks(self, is_active_stack_mock, exists_mock, glob_mock):
+    resource_files_keeper = ResourceFilesKeeper(self.SOME_PATH)
+    # Test normal execution flow
+    glob_mock.return_value = ["stack1", "stack2", "stack3", "stack4", "stack5"]
+    exists_mock.side_effect = [True, True, False, True, True]
+    # Only 4 values: stack3 is skipped because os.path.exists returns False for it
+    is_active_stack_mock.side_effect = [True, False, False, True]
+    res = resource_files_keeper.list_active_stacks(self.SOME_PATH)
+    self.assertEquals(pprint.pformat(res), "['stack1', 'stack5']")
+
+    # Test exception handling
+    glob_mock.side_effect = self.keeper_exc_side_effect
+    try:
+      resource_files_keeper.list_active_stacks(self.SOME_PATH)
+      self.fail('KeeperException not thrown')
+    except KeeperException:
+      pass # Expected
+    except Exception, e:
+      self.fail('Unexpected exception thrown:' + str(e))
+
+
+  @patch.object(ResourceFilesKeeper, "count_hash_sum")
+  @patch.object(ResourceFilesKeeper, "read_hash_sum")
+  @patch.object(ResourceFilesKeeper, "zip_directory")
+  @patch.object(ResourceFilesKeeper, "write_hash_sum")
+  def test_update_directory_archive(self, write_hash_sum_mock,
+                                    zip_directory_mock, read_hash_sum_mock,
+                                    count_hash_sum_mock):
+    # Test situation when there is no saved directory hash
+    read_hash_sum_mock.return_value = None
+    count_hash_sum_mock.return_value = self.YA_HASH
+    resource_files_keeper = ResourceFilesKeeper(self.SOME_PATH)
+    resource_files_keeper.update_directory_archive(self.SOME_PATH)
+    self.assertTrue(read_hash_sum_mock.called)
+    self.assertTrue(count_hash_sum_mock.called)
+    self.assertTrue(zip_directory_mock.called)
+    self.assertTrue(write_hash_sum_mock.called)
+
+    read_hash_sum_mock.reset_mock()
+    count_hash_sum_mock.reset_mock()
+    zip_directory_mock.reset_mock()
+    write_hash_sum_mock.reset_mock()
+
+    # Test situation when saved directory hash != current hash
+    read_hash_sum_mock.return_value = self.DUMMY_HASH
+    count_hash_sum_mock.return_value = self.YA_HASH
+    resource_files_keeper.update_directory_archive(self.SOME_PATH)
+    self.assertTrue(read_hash_sum_mock.called)
+    self.assertTrue(count_hash_sum_mock.called)
+    self.assertTrue(zip_directory_mock.called)
+    self.assertTrue(write_hash_sum_mock.called)
+
+    read_hash_sum_mock.reset_mock()
+    count_hash_sum_mock.reset_mock()
+    zip_directory_mock.reset_mock()
+    write_hash_sum_mock.reset_mock()
+
+    # Test situation when saved directory hash == current hash
+    read_hash_sum_mock.return_value = self.DUMMY_HASH
+    count_hash_sum_mock.return_value = self.DUMMY_HASH
+    resource_files_keeper.update_directory_archive(self.SOME_PATH)
+    self.assertTrue(read_hash_sum_mock.called)
+    self.assertTrue(count_hash_sum_mock.called)
+    self.assertFalse(zip_directory_mock.called)
+    self.assertFalse(write_hash_sum_mock.called)
+
+    read_hash_sum_mock.reset_mock()
+    count_hash_sum_mock.reset_mock()
+    zip_directory_mock.reset_mock()
+    write_hash_sum_mock.reset_mock()
+
+    # Check that no saved hash file is created when zipping failed
+    zip_directory_mock.side_effect = self.keeper_exc_side_effect
+    read_hash_sum_mock.return_value = self.DUMMY_HASH
+    count_hash_sum_mock.return_value = self.YA_HASH
+    try:
+      resource_files_keeper.update_directory_archive(self.SOME_PATH)
+      self.fail('KeeperException not thrown')
+    except KeeperException:
+      pass # Expected
+    except Exception, e:
+      self.fail('Unexpected exception thrown:' + str(e))
+    self.assertTrue(read_hash_sum_mock.called)
+    self.assertTrue(count_hash_sum_mock.called)
+    self.assertTrue(zip_directory_mock.called)
+    self.assertFalse(write_hash_sum_mock.called)
+
+
+  def test_count_hash_sum(self):
+    # Test normal flow
+    resource_files_keeper = ResourceFilesKeeper(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    test_dir = self.DUMMY_UNCHANGEABLE_PACKAGE
+    hash_sum = resource_files_keeper.count_hash_sum(test_dir)
+    self.assertEquals(hash_sum, self.DUMMY_UNCHANGEABLE_PACKAGE_HASH)
+
+    # Test exception handling
+    with patch("__builtin__.open") as open_mock:
+      open_mock.side_effect = self.exc_side_effect
+      try:
+        resource_files_keeper.count_hash_sum(test_dir)
+        self.fail('KeeperException not thrown')
+      except KeeperException:
+        pass # Expected
+      except Exception, e:
+        self.fail('Unexpected exception thrown:' + str(e))
+
+
+  def test_read_hash_sum(self):
+    resource_files_keeper = ResourceFilesKeeper(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    hash_sum = resource_files_keeper.read_hash_sum(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    self.assertEquals(hash_sum, "dummy_hash")
+
+    # Test exception handling
+    # If file exists, should rethrow exception
+    with patch("os.path.isfile") as isfile_mock:
+      isfile_mock.return_value = True
+      with patch("__builtin__.open") as open_mock:
+        open_mock.side_effect = self.exc_side_effect
+        try:
+          resource_files_keeper.read_hash_sum("path-to-directory")
+          self.fail('KeeperException not thrown')
+        except KeeperException:
+          pass # Expected
+        except Exception, e:
+          self.fail('Unexpected exception thrown:' + str(e))
+
+    # Test exception handling
+    # If file does not exist, should ignore exception
+    with patch("os.path.isfile") as isfile_mock:
+      isfile_mock.return_value = False
+      with patch("__builtin__.open") as open_mock:
+        open_mock.side_effect = self.exc_side_effect
+        res = resource_files_keeper.read_hash_sum("path-to-directory")
+        self.assertEqual(res, None)
+
+
+  def test_write_hash_sum(self):
+    NEW_HASH = "new_hash"
+    resource_files_keeper = ResourceFilesKeeper(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    resource_files_keeper.write_hash_sum(
+      self.DUMMY_UNCHANGEABLE_PACKAGE, NEW_HASH)
+    hash_sum = resource_files_keeper.read_hash_sum(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    self.assertEquals(hash_sum, NEW_HASH)
+
+    # Revert to previous value
+    resource_files_keeper.write_hash_sum(
+      self.DUMMY_UNCHANGEABLE_PACKAGE, self.DUMMY_HASH)
+    hash_sum = resource_files_keeper.read_hash_sum(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    self.assertEquals(hash_sum, self.DUMMY_HASH)
+
+    # Test exception handling
+    with patch("__builtin__.open") as open_mock:
+      open_mock.side_effect = self.exc_side_effect
+      try:
+        resource_files_keeper.write_hash_sum("path-to-directory", self.DUMMY_HASH)
+        self.fail('KeeperException not thrown')
+      except KeeperException:
+        pass # Expected
+      except Exception, e:
+        self.fail('Unexpected exception thrown:' + str(e))
+
+
+  def test_zip_directory(self):
+    # Test normal flow
+    resource_files_keeper = ResourceFilesKeeper(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    resource_files_keeper.zip_directory(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    arc_file = os.path.join(self.DUMMY_UNCHANGEABLE_PACKAGE,
+                            ResourceFilesKeeper.ARCHIVE_NAME)
+    # Arc file should not be empty
+    arc_size=os.path.getsize(arc_file)
+    self.assertTrue(40000 < arc_size < 50000)
+    # After creating zip, count hash sum of dir (should not change)
+    hash_val = resource_files_keeper.count_hash_sum(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    self.assertEquals(hash_val, self.DUMMY_UNCHANGEABLE_PACKAGE_HASH)
+    # Remove arc file
+    os.unlink(arc_file)
+
+    # Test exception handling
+    with patch("os.path.join") as join_mock:
+      join_mock.side_effect = self.exc_side_effect
+      try:
+        resource_files_keeper.zip_directory("path-to-directory")
+        self.fail('KeeperException not thrown')
+      except KeeperException:
+        pass # Expected
+      except Exception, e:
+        self.fail('Unexpected exception thrown:' + str(e))
+
+
+  def test_is_ignored(self):
+    resource_files_keeper = ResourceFilesKeeper(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    self.assertTrue(resource_files_keeper.is_ignored(".hash"))
+    self.assertTrue(resource_files_keeper.is_ignored("archive.zip"))
+    self.assertTrue(resource_files_keeper.is_ignored("dummy.pyc"))
+    self.assertFalse(resource_files_keeper.is_ignored("dummy.py"))
+    self.assertFalse(resource_files_keeper.is_ignored("1.sh"))
+
+
+  def test_is_active_stack(self):
+    # Test normal flow
+    resource_files_keeper = ResourceFilesKeeper(self.DUMMY_UNCHANGEABLE_PACKAGE)
+    self.assertTrue(
+      resource_files_keeper.is_active_stack(
+        os.path.join(self.DUMMY_ACTIVE_STACK, ResourceFilesKeeper.METAINFO_XML)))
+    self.assertFalse(
+      resource_files_keeper.is_active_stack(
+        os.path.join(self.DUMMY_INACTIVE_STACK, ResourceFilesKeeper.METAINFO_XML)))
+    # Test exception handling
+    with patch("xml.dom.minidom.parse") as parse_mock:
+      parse_mock.side_effect = self.exc_side_effect
+      try:
+        resource_files_keeper.is_active_stack("path-to-xml")
+        self.fail('KeeperException not thrown')
+      except KeeperException:
+        pass # Expected
+      except Exception, e:
+        self.fail('Unexpected exception thrown:' + str(e))
+
+
+
+  def exc_side_effect(self, *a):
+    raise Exception("horrible_exc")
+
+
+  def keeper_exc_side_effect(self, *a):
+    raise KeeperException("horrible_keeper_exc")
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/active_stack/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/active_stack/metainfo.xml b/ambari-server/src/test/resources/TestAmbaryServer.samples/active_stack/metainfo.xml
new file mode 100644
index 0000000..3c7f87d
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/active_stack/metainfo.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+    <versions>
+      <active>true</active>
+    </versions>
+    <extends>2.0.6</extends>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/configuration/hive-site.xml b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/configuration/hive-site.xml
new file mode 100644
index 0000000..a69e250
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/configuration/hive-site.xml
@@ -0,0 +1,267 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+<configuration>
+
+  <property>
+    <name>ambari.hive.db.schema.name</name>
+    <value>hive</value>
+    <description>Database name used as the Hive Metastore</description>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionURL</name>
+    <value>jdbc</value>
+    <description>JDBC connect string for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionDriverName</name>
+    <value>com.mysql.jdbc.Driver</value>
+    <description>Driver class name for a JDBC metastore</description>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionUserName</name>
+    <value>hive</value>
+    <description>username to use against metastore database</description>
+  </property>
+
+  <property>
+    <name>javax.jdo.option.ConnectionPassword</name>
+    <value> </value>
+    <description>password to use against metastore database</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.warehouse.dir</name>
+    <value>/apps/hive/warehouse</value>
+    <description>location of default database for the warehouse</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.sasl.enabled</name>
+    <value></value>
+    <description>If true, the metastore thrift interface will be secured with SASL.
+     Clients must authenticate with Kerberos.</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.kerberos.keytab.file</name>
+    <value></value>
+    <description>The path to the Kerberos Keytab file containing the metastore
+     thrift server's service principal.</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.kerberos.principal</name>
+    <value></value>
+    <description>The service principal for the metastore thrift server. The special
+    string _HOST will be replaced automatically with the correct host name.</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.cache.pinobjtypes</name>
+    <value>Table,Database,Type,FieldSchema,Order</value>
+    <description>List of comma separated metastore object types that should be pinned in the cache</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.uris</name>
+    <value>thrift://localhost:9083</value>
+    <description>URI for client to contact metastore server</description>
+  </property>
+
+  <property>
+    <name>hive.semantic.analyzer.factory.impl</name>
+    <value>org.apache.hcatalog.cli.HCatSemanticAnalyzerFactory</value>
+    <description>controls which SemanticAnalyzerFactory implementation class is used by CLI</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.client.socket.timeout</name>
+    <value>60</value>
+    <description>MetaStore Client socket timeout in seconds</description>
+  </property>
+
+  <property>
+    <name>hive.metastore.execute.setugi</name>
+    <value>true</value>
+    <description>In insecure mode, setting this property to true will cause the metastore to execute DFS operations using the client's reported user and group permissions. Note that this property must be set on both the client and server sides. Further note that it's best effort. If the client sets it to true and the server sets it to false, the client setting will be ignored.</description>
+  </property>
+
+  <property>
+    <name>hive.security.authorization.enabled</name>
+    <value>false</value>
+    <description>enable or disable the hive client authorization</description>
+  </property>
+
+  <property>
+    <name>hive.security.authorization.manager</name>
+    <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider</value>
+    <description>the hive client authorization manager class name.
+    The user defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider.  </description>
+  </property>
+
+  <property>
+    <name>hive.security.metastore.authorization.manager</name>
+    <value>org.apache.hadoop.hive.ql.security.authorization.StorageBasedAuthorizationProvider</value>
+    <description>The authorization manager class name to be used in the metastore for authorization. The user-defined authorization class should implement interface org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider.  </description>
+  </property>
+
+  <property>
+    <name>hive.security.authenticator.manager</name>
+    <value>org.apache.hadoop.hive.ql.security.ProxyUserAuthenticator</value>
+    <description>Hive client authenticator manager class name. The user-defined authenticator class should implement interface org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider.  </description>
+  </property>
+
+  <property>
+    <name>hive.server2.enable.doAs</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.hdfs.impl.disable.cache</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>fs.file.impl.disable.cache</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.enforce.bucketing</name>
+    <value>true</value>
+    <description>Whether bucketing is enforced. If true, while inserting into the table, bucketing is enforced.</description>
+  </property>
+
+  <property>
+    <name>hive.enforce.sorting</name>
+    <value>true</value>
+    <description>Whether sorting is enforced. If true, while inserting into the table, sorting is enforced.</description>
+  </property>
+
+  <property>
+    <name>hive.map.aggr</name>
+    <value>true</value>
+    <description>Whether to use map-side aggregation in Hive Group By queries.</description>
+  </property>
+
+  <property>
+    <name>hive.optimize.bucketmapjoin</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.optimize.bucketmapjoin.sortedmerge</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.mapred.reduce.tasks.speculative.execution</name>
+    <value>false</value>
+    <description>Whether speculative execution for reducers should be turned on.</description>
+  </property>
+
+  <property>
+    <name>hive.auto.convert.join</name>
+    <value>true</value>
+    <description>Whether Hive enables the optimization that converts a common
+      join into a mapjoin based on the input file size.</description>
+  </property>
+
+  <property>
+    <name>hive.auto.convert.sortmerge.join</name>
+    <value>true</value>
+    <description>Whether the join will be automatically converted to a sort-merge join, if the joined tables pass
+      the criteria for a sort-merge join.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.auto.convert.sortmerge.join.noconditionaltask</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.auto.convert.join.noconditionaltask</name>
+    <value>true</value>
+    <description>Whether Hive enables the optimization that converts a common join into a mapjoin based on the input file
+      size. If this parameter is on, and the sum of sizes for n-1 of the tables/partitions for an n-way join is smaller than the
+      specified size, the join is directly converted to a mapjoin (there is no conditional task).
+    </description>
+  </property>
+
+  <property>
+    <name>hive.auto.convert.join.noconditionaltask.size</name>
+    <value>1000000000</value>
+    <description>If hive.auto.convert.join.noconditionaltask is off, this parameter does not take effect. However, if it
+      is on, and the sum of sizes for n-1 of the tables/partitions for an n-way join is smaller than this size, the join is directly
+      converted to a mapjoin (there is no conditional task). The default is 10MB.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.optimize.reducededuplication.min.reducer</name>
+    <value>1</value>
+    <description>Reduce deduplication merges two reduce sinks (RSs) by moving the key/parts/reducer-num of the child RS to the parent RS.
+      That means if the reducer-num of the child RS is fixed (order by or forced bucketing) and small, it can result in a very slow, single-reducer MR job.
+      The optimization will be disabled if the number of reducers is less than the specified value.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.optimize.mapjoin.mapreduce</name>
+    <value>true</value>
+    <description>If hive.auto.convert.join is off, this parameter does not take
+      effect. If it is on, and if there are map-join jobs followed by a map-reduce
+      job (e.g., a group by), each map-only job is merged with the following
+      map-reduce job.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.mapjoin.bucket.cache.size</name>
+    <value>10000</value>
+    <description>
+      Size per reducer. The default is 1G, i.e., if the input size is 10G, it
+      will use 10 reducers.
+    </description>
+  </property>
+
+  <property>
+    <name>hive.vectorized.execution.enabled</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>hive.optimize.reducededuplication</name>
+    <value>true</value>
+  </property>
+
+  <property>
+    <name>hive.optimize.index.filter</name>
+    <value>true</value>
+    <description>
+    Whether to enable automatic use of indexes
+    </description>
+  </property>
+
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/metainfo.xml b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/metainfo.xml
new file mode 100644
index 0000000..15b95fa
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/metainfo.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <user>root</user>
+  <comment>Data warehouse system for ad-hoc queries &amp; analysis of large datasets and table &amp; storage management
+    service
+  </comment>
+  <version>0.11.0.2.0.5.0</version>
+
+  <components>
+    <component>
+      <name>HIVE_METASTORE</name>
+      <category>MASTER</category>
+    </component>
+    <component>
+      <name>HIVE_SERVER</name>
+      <category>MASTER</category>
+    </component>
+    <component>
+      <name>MYSQL_SERVER</name>
+      <category>MASTER</category>
+    </component>
+    <component>
+      <name>HIVE_CLIENT</name>
+      <category>CLIENT</category>
+    </component>
+  </components>
+  <configuration-dependencies>
+    <config-type>global</config-type>
+    <config-type>hive-site</config-type>
+  </configuration-dependencies>
+</metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/.hash
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/.hash b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/.hash
new file mode 100644
index 0000000..9550dab
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/.hash
@@ -0,0 +1 @@
+dummy_hash
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/addMysqlUser.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/addMysqlUser.sh b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/addMysqlUser.sh
new file mode 100644
index 0000000..8d31b91
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/addMysqlUser.sh
@@ -0,0 +1,41 @@
+#!/bin/sh
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
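+# Usage: addMysqlUser.sh <mysqld service name> <db user> <db password> <db host>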
+mysqldservice=$1
+mysqldbuser=$2
+mysqldbpasswd=$3
+mysqldbhost=$4
+myhostname=$(hostname -f)
+
+service $mysqldservice start
+echo "Adding user $mysqldbuser@$mysqldbhost and $mysqldbuser@localhost"
+mysql -u root -e "CREATE USER '$mysqldbuser'@'$mysqldbhost' IDENTIFIED BY '$mysqldbpasswd';"
+mysql -u root -e "CREATE USER '$mysqldbuser'@'localhost' IDENTIFIED BY '$mysqldbpasswd';"
+mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'$mysqldbhost';"
+mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'localhost';"
+# Create the host-specific account only if it does not already exist
+if [ -z "$(mysql -u root -e "select user from mysql.user where user='$mysqldbuser' and host='$myhostname'" | grep "$mysqldbuser")" ]; then
+  echo "Adding user $mysqldbuser@$myhostname";
+  mysql -u root -e "CREATE USER '$mysqldbuser'@'$myhostname' IDENTIFIED BY '$mysqldbpasswd';";
+  mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO '$mysqldbuser'@'$myhostname';";
+fi
+mysql -u root -e "flush privileges;"
+service $mysqldservice stop

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hcatSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hcatSmoke.sh b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hcatSmoke.sh
new file mode 100644
index 0000000..9e7b33f
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hcatSmoke.sh
@@ -0,0 +1,35 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
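+# Usage: hcatSmoke.sh <table name> <prepare|cleanup>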
+export tablename=$1
+
+case "$2" in
+
+prepare)
+  hcat -e "show tables"
+  hcat -e "drop table IF EXISTS ${tablename}"
+  hcat -e "create table ${tablename} ( id INT, name string ) stored as rcfile ;"
+;;
+
+cleanup)
+  hcat -e "drop table IF EXISTS ${tablename}"
+;;
+
+esac

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveSmoke.sh b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveSmoke.sh
new file mode 100644
index 0000000..7e03524
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveSmoke.sh
@@ -0,0 +1,23 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
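+# Usage: hiveSmoke.sh <table name>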
+export tablename=$1
+echo "CREATE EXTERNAL TABLE IF NOT EXISTS ${tablename} ( foo INT, bar STRING );" | hive
+echo "DESCRIBE ${tablename};" | hive

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveserver2.sql
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveserver2.sql b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveserver2.sql
new file mode 100644
index 0000000..99a3865
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveserver2.sql
@@ -0,0 +1,23 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+CREATE EXTERNAL TABLE IF NOT EXISTS hiveserver2smoke20408 ( foo INT, bar STRING );
+DESCRIBE hiveserver2smoke20408;

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveserver2Smoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveserver2Smoke.sh b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveserver2Smoke.sh
new file mode 100644
index 0000000..051a21e
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/hiveserver2Smoke.sh
@@ -0,0 +1,31 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
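+# Usage: hiveserver2Smoke.sh <HiveServer2 JDBC URL> <SQL script run via beeline's !run>
+# Any "Error" line in the beeline output marks the smoke test as failed.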
+smokeout=`/usr/lib/hive/bin/beeline -u $1 -n fakeuser -p fakepwd -d org.apache.hive.jdbc.HiveDriver -e "!run $2" 2>&1| awk '{print}'|grep Error`
+
+if [ "x$smokeout" == "x" ]; then
+  echo "Smoke test of hiveserver2 passed"
+  exit 0
+else
+  echo "Smoke test of hiveserver2 wasnt passed"
+  echo $smokeout
+  exit 1
+fi

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/pigSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/pigSmoke.sh b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/pigSmoke.sh
new file mode 100644
index 0000000..2e90ac0
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/pigSmoke.sh
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License
+
+A = load 'passwd' using PigStorage(':');
+B = foreach A generate \$0 as id;
+store B into 'pigsmoke.out';

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/startHiveserver2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/startHiveserver2.sh b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/startHiveserver2.sh
new file mode 100644
index 0000000..fa90c2f
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/startHiveserver2.sh
@@ -0,0 +1,22 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
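+# Usage: startHiveserver2.sh <stdout log> <stderr log> <pid file> <hive conf dir>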
+HIVE_CONF_DIR=$4 /usr/lib/hive/bin/hiveserver2 -hiveconf hive.metastore.uris=' ' > $1 2> $2 &
+echo $!|cat>$3

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/startMetastore.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/startMetastore.sh b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/startMetastore.sh
new file mode 100644
index 0000000..9350776
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/files/startMetastore.sh
@@ -0,0 +1,22 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
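+# Usage: startMetastore.sh <stdout log> <stderr log> <pid file> <hive conf dir>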
+HIVE_CONF_DIR=$4 hive --service metastore > $1 2> $2 &
+echo $!|cat>$3

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/__init__.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/__init__.py
new file mode 100644
index 0000000..5561e10
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/__init__.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat.py
new file mode 100644
index 0000000..2993d3a
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import sys
+
+
+def hcat():
+  import params
+
+  Directory(params.hcat_conf_dir,
+            owner=params.hcat_user,
+            group=params.user_group,
+  )
+
+  Directory(params.hcat_pid_dir,
+            owner=params.webhcat_user,
+            recursive=True
+  )
+
+  hcat_TemplateConfig('hcat-env.sh')
+
+
+def hcat_TemplateConfig(name):
+  import params
+
+  TemplateConfig(format("{hcat_conf_dir}/{name}"),
+                 owner=params.hcat_user,
+                 group=params.user_group
+  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat_client.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat_client.py
new file mode 100644
index 0000000..8b5921a
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat_client.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+from hcat import hcat
+
+class HCatClient(Script):
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+
+    env.set_params(params)
+
+    hcat()
+
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+
+if __name__ == "__main__":
+  HCatClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat_service_check.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat_service_check.py
new file mode 100644
index 0000000..5112e99
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hcat_service_check.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+def hcat_service_check():
+    import params
+
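+    # Smoke test flow: prepare a table via hcatSmoke.sh, verify that the
+    # table directory appeared in the HDFS warehouse, then clean it up.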
+    unique = get_unique_id_and_date()
+    output_file = format("/apps/hive/warehouse/hcatsmoke{unique}")
+    test_cmd = format("fs -test -e {output_file}")
+
+    if params.security_enabled:
+      kinit_cmd = format(
+        "{kinit_path_local} -kt {smoke_user_keytab} {smokeuser}; ")
+    else:
+      kinit_cmd = ""
+
+    File('/tmp/hcatSmoke.sh',
+         content=StaticFile("hcatSmoke.sh"),
+         mode=0755
+    )
+
+    prepare_cmd = format("{kinit_cmd}sh /tmp/hcatSmoke.sh hcatsmoke{unique} prepare")
+
+    Execute(prepare_cmd,
+            tries=3,
+            user=params.smokeuser,
+            try_sleep=5,
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
+            logoutput=True)
+
+    ExecuteHadoop(test_cmd,
+                  user=params.hdfs_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir)
+
+    cleanup_cmd = format("{kinit_cmd}sh /tmp/hcatSmoke.sh hcatsmoke{unique} cleanup")
+
+    Execute(cleanup_cmd,
+            tries=3,
+            user=params.smokeuser,
+            try_sleep=5,
+            path=['/usr/sbin', '/usr/local/bin', '/bin', '/usr/bin'],
+            logoutput=True
+    )

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive.py
new file mode 100644
index 0000000..b37ebb2
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import sys
+
+
+def hive(name=None):
+  import params
+
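+  # Server components (metastore, hiveserver2) get restrictive config file
+  # permissions (0600) and the JDBC connector; clients get 0644 configs.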
+  if name == 'metastore' or name == 'hiveserver2':
+    hive_config_dir = params.hive_server_conf_dir
+    config_file_mode = 0600
+    jdbc_connector()
+  else:
+    hive_config_dir = params.hive_conf_dir
+    config_file_mode = 0644
+
+  Directory(hive_config_dir,
+            owner=params.hive_user,
+            group=params.user_group,
+            recursive=True
+  )
+
+  XmlConfig("hive-site.xml",
+            conf_dir=hive_config_dir,
+            configurations=params.config['configurations']['hive-site'],
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=config_file_mode
+  )
+
+  cmd = format("/bin/sh -c 'cd /usr/lib/ambari-agent/ && curl -kf --retry 5 "
+               "{jdk_location}{check_db_connection_jar_name} -o {check_db_connection_jar_name}'")
+
+  Execute(cmd,
+          not_if=format("[ -f {check_db_connection_jar_name} ]"))
+
+  if name == 'metastore':
+    File(params.start_metastore_path,
+         mode=0755,
+         content=StaticFile('startMetastore.sh')
+    )
+
+  elif name == 'hiveserver2':
+    File(params.start_hiveserver2_path,
+         mode=0755,
+         content=StaticFile('startHiveserver2.sh')
+    )
+
+  if name != "client":
+    crt_directory(params.hive_pid_dir)
+    crt_directory(params.hive_log_dir)
+    crt_directory(params.hive_var_lib)
+
+  File(format("{hive_config_dir}/hive-env.sh"),
+       owner=params.hive_user,
+       group=params.user_group,
+       content=Template('hive-env.sh.j2', conf_dir=hive_config_dir)
+  )
+
+  crt_file(format("{hive_conf_dir}/hive-default.xml.template"))
+  crt_file(format("{hive_conf_dir}/hive-env.sh.template"))
+  crt_file(format("{hive_conf_dir}/hive-exec-log4j.properties.template"))
+  crt_file(format("{hive_conf_dir}/hive-log4j.properties.template"))
+
+
+def crt_directory(name):
+  import params
+
+  Directory(name,
+            recursive=True,
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0755)
+
+
+def crt_file(name):
+  import params
+
+  File(name,
+       owner=params.hive_user,
+       group=params.user_group
+  )
+
+
+def jdbc_connector():
+  import params
+
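+  # The MySQL driver is copied from the local /usr/share/java; the Oracle
+  # driver is first downloaded from driver_curl_source, then copied.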
+  if params.hive_jdbc_driver == "com.mysql.jdbc.Driver":
+    cmd = format("hive mkdir -p {artifact_dir} ; cp /usr/share/java/{jdbc_jar_name} {target}")
+
+    Execute(cmd,
+            not_if=format("test -f {target}"),
+            creates=params.target,
+            path=["/bin", "usr/bin/"])
+
+  elif params.hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
+    cmd = format(
+      "mkdir -p {artifact_dir} ; curl -kf --retry 10 {driver_curl_source} -o {driver_curl_target} &&  "
+      "cp {driver_curl_target} {target}")
+
+    Execute(cmd,
+            not_if=format("test -f {target}"),
+            path=["/bin", "usr/bin/"])

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_client.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_client.py
new file mode 100644
index 0000000..0a5fb2b
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_client.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+import sys
+from resource_management import *
+
+from hive import hive
+
+class HiveClient(Script):
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    hive(name='client')
+
+
+  def status(self, env):
+    raise ClientComponentHasNoStatus()
+
+if __name__ == "__main__":
+  HiveClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_metastore.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_metastore.py
new file mode 100644
index 0000000..c741174
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_metastore.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+
+from hive import hive
+from hive_service import hive_service
+
+class HiveMetastore(Script):
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    hive(name='metastore')
+
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+    hive_service( 'metastore',
+                   action = 'start'
+    )
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+
+    hive_service( 'metastore',
+                   action = 'stop'
+    )
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
+    # Check that the Hive Metastore process referenced by the pid file is running
+    check_process_status(pid_file)
+
+if __name__ == "__main__":
+  HiveMetastore().execute()
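
HiveMetastore, like the other classes in this package, only defines lifecycle
methods; the agent decides which one to invoke. A very rough sketch of the
dispatch that Script.execute() performs (simplified; the actual
resource_management implementation also parses a command JSON file and sets up
logging):

import sys

class Script(object):
  def execute(self):
    # Dispatch to the method named by the requested command,
    # e.g. "install", "start", "stop" or "status".
    command = sys.argv[1].lower()
    getattr(self, command)(env=None)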

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_server.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_server.py
new file mode 100644
index 0000000..3ad81a1
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_server.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+
+from hive import hive
+from hive_service import hive_service
+
+class HiveServer(Script):
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    hive(name='hiveserver2')
+
+  def start(self, env):
+    import params
+    env.set_params(params)
+    self.configure(env) # FOR SECURITY
+    hive_service( 'hiveserver2',
+                  action = 'start'
+    )
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+
+    hive_service( 'hiveserver2',
+                  action = 'stop'
+    )
+
+
+  def status(self, env):
+    import status_params
+    env.set_params(status_params)
+    pid_file = format("{hive_pid_dir}/{hive_pid}")
+    # Check that the HiveServer2 process referenced by the pid file is running
+    check_process_status(pid_file)
+
+if __name__ == "__main__":
+  HiveServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_service.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_service.py
new file mode 100644
index 0000000..e8d4e5c
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/hive_service.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+
+def hive_service(
+    name,
+    action='start'):
+
+  import params
+
+  if name == 'metastore':
+    pid_file = format("{hive_pid_dir}/{hive_metastore_pid}")
+    cmd = format(
+      "env HADOOP_HOME={hadoop_home} JAVA_HOME={java64_home} {start_metastore_path} {hive_log_dir}/hive.out {hive_log_dir}/hive.log {pid_file} {hive_server_conf_dir}")
+  elif name == 'hiveserver2':
+    pid_file = format("{hive_pid_dir}/{hive_pid}")
+    cmd = format(
+      "env JAVA_HOME={java64_home} {start_hiveserver2_path} {hive_log_dir}/hive-server2.out {hive_log_dir}/hive-server2.log {pid_file} {hive_server_conf_dir}")
+
+  if action == 'start':
+    daemon_cmd = cmd
+    no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1")
+    Execute(daemon_cmd,
+            user=params.hive_user,
+            not_if=no_op_test
+    )
+
+    if params.hive_jdbc_driver in ("com.mysql.jdbc.Driver",
+                                   "oracle.jdbc.driver.OracleDriver"):
+      db_connection_check_command = format(
+        "{java64_home}/bin/java -cp {check_db_connection_jar}:/usr/share/java/{jdbc_jar_name} org.apache.ambari.server.DBConnectionVerification {hive_jdbc_connection_url} {hive_metastore_user_name} {hive_metastore_user_passwd} {hive_jdbc_driver}")
+      Execute(db_connection_check_command,
+              path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin')
+
+  elif action == 'stop':
+    daemon_cmd = format("kill `cat {pid_file}` >/dev/null 2>&1 && rm -f {pid_file}")
+    Execute(daemon_cmd)
+
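
The no_op_test guard above is a shell-level liveness probe: the daemon counts
as running only when the pid file exists and the recorded pid is alive. The
same check in plain Python, as an illustrative sketch:

import os

def daemon_running(pid_file):
  try:
    with open(pid_file) as fh:
      pid = int(fh.read().strip())
    os.kill(pid, 0)  # signal 0 probes the pid without affecting the process
    return True
  except (IOError, OSError, ValueError):
    return False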

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/mysql_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/mysql_server.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/mysql_server.py
new file mode 100644
index 0000000..a160d4b
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/mysql_server.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+import sys
+from resource_management import *
+
+from mysql_service import mysql_service
+
+class MysqlServer(Script):
+
+  if System.get_instance().os_family == "suse":
+    daemon_name = 'mysql'
+  else:
+    daemon_name = 'mysqld'
+
+  def install(self, env):
+    self.install_packages(env)
+    self.configure(env)
+
+  def configure(self, env):
+    import params
+    env.set_params(params)
+
+    mysql_service(daemon_name=self.daemon_name, action='start')
+
+    File(params.mysql_adduser_path,
+         mode=0755,
+         content=StaticFile('addMysqlUser.sh')
+    )
+
+    # Passing the command as a tuple makes each argument escaped automatically
+    cmd = ("bash", "-x", params.mysql_adduser_path, self.daemon_name,
+           params.hive_metastore_user_name, params.hive_metastore_user_passwd, params.mysql_host[0])
+
+    Execute(cmd,
+            tries=3,
+            try_sleep=5,
+            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+            logoutput=True
+    )
+
+    mysql_service(daemon_name=self.daemon_name, action='stop')
+
+  def start(self, env):
+    import params
+    env.set_params(params)
+
+    mysql_service(daemon_name=self.daemon_name, action = 'start')
+
+  def stop(self, env):
+    import params
+    env.set_params(params)
+
+    mysql_service(daemon_name=self.daemon_name, action = 'stop')
+
+  def status(self, env):
+    mysql_service(daemon_name=self.daemon_name, action = 'status')
+
+if __name__ == "__main__":
+  MysqlServer().execute()
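
The tries/try_sleep arguments passed to Execute above give the adduser script
simple bounded retries. Reduced to plain Python (a sketch assuming that a zero
exit code means success):

import subprocess
import time

def run_with_retries(cmd, tries=3, try_sleep=5):
  for attempt in range(tries):
    if subprocess.call(cmd, shell=True) == 0:
      return
    if attempt < tries - 1:
      time.sleep(try_sleep)
  raise RuntimeError("command failed after %d tries: %s" % (tries, cmd))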

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/mysql_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/mysql_service.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/mysql_service.py
new file mode 100644
index 0000000..4716343
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/mysql_service.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+
+def mysql_service(daemon_name=None, action='start'):
+
+  logoutput=True
+  if action == 'start':
+    cmd = format('service {daemon_name} start')
+  elif action == 'stop':
+    cmd = format('service {daemon_name} stop')
+  elif action == 'status':
+    cmd = format('service {daemon_name} status')
+    logoutput = False
+  else:
+    cmd = None
+
+  if cmd is not None:
+    Execute(cmd,
+            path="/usr/local/bin/:/bin/:/sbin/",
+            tries=1,
+            logoutput=logoutput)
+
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/params.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/params.py
new file mode 100644
index 0000000..0cf89be
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/params.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+import status_params
+
+# server configurations
+config = Script.get_config()
+
+hive_metastore_user_name = config['configurations']['hive-site']['javax.jdo.option.ConnectionUserName']
+hive_server_conf_dir = "/etc/hive/conf.server"
+hive_jdbc_connection_url = config['configurations']['hive-site']['javax.jdo.option.ConnectionURL']
+
+hive_metastore_user_passwd = config['configurations']['hive-site']['javax.jdo.option.ConnectionPassword']
+
+#users
+hive_user = config['configurations']['global']['hive_user']
+hive_lib = '/usr/lib/hive/lib/'
+#JDBC driver jar name
+hive_jdbc_driver = default('hive_jdbc_driver', 'com.mysql.jdbc.Driver')
+if hive_jdbc_driver == "com.mysql.jdbc.Driver":
+  jdbc_jar_name = "mysql-connector-java.jar"
+elif hive_jdbc_driver == "oracle.jdbc.driver.OracleDriver":
+  jdbc_jar_name = "ojdbc6.jar"
+
+check_db_connection_jar_name = "DBConnectionVerification.jar"
+check_db_connection_jar = format("/usr/lib/ambari-agent/{check_db_connection_jar_name}")
+
+#common
+hive_metastore_port = config['configurations']['global']['hive_metastore_port']
+hive_var_lib = '/var/lib/hive'
+hive_server_host = config['clusterHostInfo']['hive_server_host']
+hive_url = format("jdbc:hive2://{hive_server_host}:10000")
+
+smokeuser = config['configurations']['global']['smokeuser']
+smoke_test_sql = "/tmp/hiveserver2.sql"
+smoke_test_path = "/tmp/hiveserver2Smoke.sh"
+smoke_user_keytab = config['configurations']['global']['smokeuser_keytab']
+
+security_enabled = config['configurations']['global']['security_enabled']
+
+kinit_path_local = get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
+hive_metastore_keytab_path =  config['configurations']['hive-site']['hive.metastore.kerberos.keytab.file']
+
+#hive_env
+hive_conf_dir = "/etc/hive/conf"
+hive_dbroot = config['configurations']['global']['hive_dbroot']
+hive_log_dir = config['configurations']['global']['hive_log_dir']
+hive_pid_dir = status_params.hive_pid_dir
+hive_pid = status_params.hive_pid
+
+#hive-site
+hive_database_name = config['configurations']['global']['hive_database_name']
+
+#Starting hiveserver2
+start_hiveserver2_script = 'startHiveserver2.sh'
+
+hadoop_home = '/usr'
+
+##Starting metastore
+start_metastore_script = 'startMetastore.sh'
+hive_metastore_pid = status_params.hive_metastore_pid
+java_share_dir = '/usr/share/java'
+driver_curl_target = format("{java_share_dir}/{jdbc_jar_name}")
+
+hdfs_user =  config['configurations']['global']['hdfs_user']
+user_group = config['configurations']['global']['user_group']
+artifact_dir = "/tmp/HDP-artifacts/"
+
+target = format("{hive_lib}/{jdbc_jar_name}")
+
+jdk_location = config['hostLevelParams']['jdk_location']
+driver_curl_source = format("{jdk_location}/{jdbc_jar_name}")
+
+start_hiveserver2_path = "/tmp/start_hiveserver2_script"
+start_metastore_path = "/tmp/start_metastore_script"
+
+hive_aux_jars_path = config['configurations']['global']['hive_aux_jars_path']
+hadoop_heapsize = config['configurations']['global']['hadoop_heapsize']
+java64_home = config['hostLevelParams']['java_home']
+
+##### MYSQL
+
+db_name = config['configurations']['global']['hive_database_name']
+mysql_user = "mysql"
+mysql_group = 'mysql'
+mysql_host = config['clusterHostInfo']['hive_mysql_host']
+
+mysql_adduser_path = "/tmp/addMysqlUser.sh"
+
+########## HCAT
+
+hcat_conf_dir = '/etc/hcatalog/conf'
+
+metastore_port = 9933
+hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
+
+hcat_dbroot = hcat_lib
+
+hcat_user = config['configurations']['global']['hcat_user']
+webhcat_user = config['configurations']['global']['webhcat_user']
+
+hcat_pid_dir = status_params.hcat_pid_dir
+hcat_log_dir = config['configurations']['global']['hcat_log_dir']
+
+hadoop_conf_dir = '/etc/hadoop/conf'
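
A note on why plain module-level names are enough here: the format() helper
used throughout these scripts fills {placeholders} from the caller's
variables. An illustrative approximation of that trick (not the library's
actual implementation):

import inspect

def fmt(template):
  caller = inspect.currentframe().f_back
  names = dict(caller.f_globals)
  names.update(caller.f_locals)
  return template.format(**names)

java_share_dir = '/usr/share/java'
jdbc_jar_name = 'mysql-connector-java.jar'
print(fmt("{java_share_dir}/{jdbc_jar_name}"))  # /usr/share/java/mysql-connector-java.jar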

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/service_check.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/service_check.py
new file mode 100644
index 0000000..111e8a1
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/service_check.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+from hcat_service_check import hcat_service_check
+
+class HiveServiceCheck(Script):
+  def service_check(self, env):
+    import params
+    env.set_params(params)
+    if params.security_enabled:
+      kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")
+      hive_principal_ext = format("principal={hive_metastore_keytab_path}")
+      hive_url_ext = format("{hive_url}/\\;{hive_principal_ext}")
+      smoke_cmd = format("{kinit_cmd} env JAVA_HOME={java64_home} {smoke_test_path} {hive_url_ext} {smoke_test_sql}")
+    else:
+      smoke_cmd = format("env JAVA_HOME={java64_home} {smoke_test_path} {hive_url} {smoke_test_sql}")
+
+    File(params.smoke_test_path,
+         content=StaticFile('hiveserver2Smoke.sh'),
+         mode=0755
+    )
+
+    File(params.smoke_test_sql,
+         content=StaticFile('hiveserver2.sql')
+    )
+
+    Execute(smoke_cmd,
+            tries=3,
+            try_sleep=5,
+            path='/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin',
+            logoutput=True,
+            user=params.smokeuser)
+
+    hcat_service_check()
+
+if __name__ == "__main__":
+  HiveServiceCheck().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/status_params.py b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/status_params.py
new file mode 100644
index 0000000..7770975
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/scripts/status_params.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+"""
+
+from resource_management import *
+
+config = Script.get_config()
+
+hive_pid_dir = config['configurations']['global']['hive_pid_dir']
+hive_pid = 'hive-server.pid'
+
+hive_metastore_pid = 'hive.pid'
+
+hcat_pid_dir = config['configurations']['global']['hcat_pid_dir']

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/templates/hcat-env.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/templates/hcat-env.sh.j2 b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/templates/hcat-env.sh.j2
new file mode 100644
index 0000000..2a35240
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/templates/hcat-env.sh.j2
@@ -0,0 +1,25 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+JAVA_HOME={{java64_home}}
+HCAT_PID_DIR={{hcat_pid_dir}}/
+HCAT_LOG_DIR={{hcat_log_dir}}/
+HCAT_CONF_DIR={{hcat_conf_dir}}
+HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+#DBROOT is the path where the connector jars are downloaded
+DBROOT={{hcat_dbroot}}
+USER={{hcat_user}}
+METASTORE_PORT={{metastore_port}}

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/templates/hive-env.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/templates/hive-env.sh.j2 b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/templates/hive-env.sh.j2
new file mode 100644
index 0000000..548262a
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/dummy_stack/HIVE/package/templates/hive-env.sh.j2
@@ -0,0 +1,55 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Set Hive and Hadoop environment variables here. These variables can be used
+# to control the execution of Hive. It should be used by admins to configure
+# the Hive installation (so that users do not have to set environment variables
+# or set command line parameters to get correct behavior).
+#
+# The hive service being invoked (CLI/HWI etc.) is available via the environment
+# variable SERVICE
+
+# Hive Client memory usage can be an issue if a large number of clients
+# are running at the same time. The flags below have been useful in
+# reducing memory usage:
+#
+ if [ "$SERVICE" = "cli" ]; then
+   if [ -z "$DEBUG" ]; then
+     export HADOOP_OPTS="$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:+UseParNewGC -XX:-UseGCOverheadLimit"
+   else
+     export HADOOP_OPTS="$HADOOP_OPTS -XX:NewRatio=12 -Xms10m -XX:MaxHeapFreeRatio=40 -XX:MinHeapFreeRatio=15 -XX:-UseGCOverheadLimit"
+   fi
+ fi
+
+# The heap size of the JVM started by the hive shell script can be controlled via:
+
+export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"
+export HADOOP_CLIENT_OPTS="-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS"
+
+# Larger heap size may be required when running queries over large number of files or partitions.
+# By default hive shell scripts use a heap size of 256 (MB).  Larger heap size would also be
+# appropriate for hive server (hwi etc).
+
+
+# Set HADOOP_HOME to point to a specific hadoop install directory
+HADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}
+
+# Hive Configuration Directory can be controlled by:
+export HIVE_CONF_DIR={{conf_dir}}
+
+# Folder containing extra libraries required for hive compilation/execution can be controlled by:
+# export HIVE_AUX_JARS_PATH=
+export HIVE_AUX_JARS_PATH={{hive_aux_jars_path}}
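
Such .j2 files are rendered by the Template resource, which is essentially
ordinary Jinja2 substitution. A minimal stand-alone sketch of rendering one
line of the template above (requires the jinja2 package):

from jinja2 import Template

line = 'export HADOOP_HEAPSIZE="{{hadoop_heapsize}}"'
print(Template(line).render(hadoop_heapsize=1024))
# prints: export HADOOP_HEAPSIZE="1024"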

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/resources/TestAmbaryServer.samples/inactive_stack/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/TestAmbaryServer.samples/inactive_stack/metainfo.xml b/ambari-server/src/test/resources/TestAmbaryServer.samples/inactive_stack/metainfo.xml
new file mode 100644
index 0000000..b7d8766
--- /dev/null
+++ b/ambari-server/src/test/resources/TestAmbaryServer.samples/inactive_stack/metainfo.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+    <versions>
+      <active>false</active>
+    </versions>
+    <extends>2.0.6</extends>
+</metainfo>
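
The <active> flag is what lets the server-side housekeeping skip inactive
stacks. A sketch of how such a metainfo.xml could be checked with the stdlib
(illustrative; the helper name and the "missing element means inactive"
default are assumptions of this sketch):

from xml.dom import minidom

def stack_is_active(metainfo_path):
  dom = minidom.parse(metainfo_path)
  nodes = dom.getElementsByTagName("active")
  return bool(nodes) and nodes[0].firstChild.data.strip().lower() == "true"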


[2/2] git commit: AMBARI-4481. Add to the agent ability to download service scripts and hooks (dlysnichenko)

Posted by dm...@apache.org.
AMBARI-4481. Add to the agent ability to download service scripts and hooks (dlysnichenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/02f9c453
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/02f9c453
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/02f9c453

Branch: refs/heads/trunk
Commit: 02f9c45311ee6f6e7d87fe07b0059ad43a265782
Parents: f2c18bc
Author: Lisnichenko Dmitro <dl...@hortonworks.com>
Authored: Fri Jan 31 15:33:57 2014 +0200
Committer: Lisnichenko Dmitro <dl...@hortonworks.com>
Committed: Tue Feb 18 23:03:50 2014 +0200

----------------------------------------------------------------------
 ambari-agent/conf/unix/ambari-agent.ini         |   4 +-
 ambari-agent/pom.xml                            |   3 +-
 .../src/main/python/ambari_agent/ActionQueue.py |   4 +-
 .../src/main/python/ambari_agent/Controller.py  |  15 +-
 .../ambari_agent/CustomServiceOrchestrator.py   |  18 +-
 .../src/main/python/ambari_agent/FileCache.py   | 211 ++++++++++--
 .../test/python/ambari_agent/TestActionQueue.py |  44 ++-
 .../test/python/ambari_agent/TestController.py  |  27 +-
 .../TestCustomServiceOrchestrator.py            |  66 +++-
 .../test/python/ambari_agent/TestFileCache.py   | 320 +++++++++++++++++--
 .../test/python/ambari_agent/TestHeartbeat.py   |  35 +-
 .../ambari_agent/dummy_files/dummy_archive.zip  | Bin 0 -> 29558 bytes
 ambari-server/pom.xml                           |  16 +-
 .../ambari/server/agent/HeartbeatMonitor.java   |   6 +-
 .../AmbariCustomCommandExecutionHelper.java     |   4 +-
 .../ambari/server/controller/AmbariServer.java  |   4 +-
 ambari-server/src/main/python/ambari-server.py  |  29 +-
 .../src/main/python/ambari_server/__init__.py   |  21 ++
 .../python/ambari_server/resourceFilesKeeper.py | 258 +++++++++++++++
 .../src/test/python/TestAmbariServer.py         |  20 +-
 .../src/test/python/TestResourceFilesKeeper.py  | 319 ++++++++++++++++++
 .../active_stack/metainfo.xml                   |  23 ++
 .../HIVE/configuration/hive-site.xml            | 267 ++++++++++++++++
 .../dummy_stack/HIVE/metainfo.xml               |  47 +++
 .../dummy_stack/HIVE/package/.hash              |   1 +
 .../HIVE/package/files/addMysqlUser.sh          |  41 +++
 .../dummy_stack/HIVE/package/files/hcatSmoke.sh |  35 ++
 .../dummy_stack/HIVE/package/files/hiveSmoke.sh |  23 ++
 .../HIVE/package/files/hiveserver2.sql          |  23 ++
 .../HIVE/package/files/hiveserver2Smoke.sh      |  31 ++
 .../dummy_stack/HIVE/package/files/pigSmoke.sh  |  18 ++
 .../HIVE/package/files/startHiveserver2.sh      |  22 ++
 .../HIVE/package/files/startMetastore.sh        |  22 ++
 .../HIVE/package/scripts/__init__.py            |  19 ++
 .../dummy_stack/HIVE/package/scripts/hcat.py    |  47 +++
 .../HIVE/package/scripts/hcat_client.py         |  43 +++
 .../HIVE/package/scripts/hcat_service_check.py  |  63 ++++
 .../dummy_stack/HIVE/package/scripts/hive.py    | 122 +++++++
 .../HIVE/package/scripts/hive_client.py         |  41 +++
 .../HIVE/package/scripts/hive_metastore.py      |  63 ++++
 .../HIVE/package/scripts/hive_server.py         |  63 ++++
 .../HIVE/package/scripts/hive_service.py        |  56 ++++
 .../HIVE/package/scripts/mysql_server.py        |  77 +++++
 .../HIVE/package/scripts/mysql_service.py       |  44 +++
 .../dummy_stack/HIVE/package/scripts/params.py  | 123 +++++++
 .../HIVE/package/scripts/service_check.py       |  56 ++++
 .../HIVE/package/scripts/status_params.py       |  30 ++
 .../HIVE/package/templates/hcat-env.sh.j2       |  25 ++
 .../HIVE/package/templates/hive-env.sh.j2       |  55 ++++
 .../inactive_stack/metainfo.xml                 |  23 ++
 50 files changed, 2807 insertions(+), 120 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/conf/unix/ambari-agent.ini
----------------------------------------------------------------------
diff --git a/ambari-agent/conf/unix/ambari-agent.ini b/ambari-agent/conf/unix/ambari-agent.ini
index 11d6f1a..27eb6e1 100644
--- a/ambari-agent/conf/unix/ambari-agent.ini
+++ b/ambari-agent/conf/unix/ambari-agent.ini
@@ -25,6 +25,7 @@ data_cleanup_interval=86400
 data_cleanup_max_age=2592000
 ping_port=8670
 cache_dir=/var/lib/ambari-agent/cache
+tolerate_download_failures=true
 
 [puppet]
 puppetmodules=/var/lib/ambari-agent/puppet
@@ -32,9 +33,6 @@ ruby_home=/usr/lib/ambari-agent/lib/ruby-1.8.7-p370
 puppet_home=/usr/lib/ambari-agent/lib/puppet-2.7.9
 facter_home=/usr/lib/ambari-agent/lib/facter-1.6.10
 
-[python]
-custom_actions_dir = /var/lib/ambari-agent/resources/custom_actions
-
 [command]
 maxretries=2
 sleepBetweenRetries=1
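
The new tolerate_download_failures key is read like any other [agent] setting;
as the FileCache change below shows, any value other than the string "true" is
treated as false. A sketch (the config path is the usual install location):

import ConfigParser  # configparser on Python 3

config = ConfigParser.ConfigParser()
config.read("/etc/ambari-agent/conf/ambari-agent.ini")
tolerate = config.get('agent', 'tolerate_download_failures').lower() == 'true'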

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index d037d46..12a9910 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -336,7 +336,6 @@
               </sources>
             </mapping>
             <mapping>
-              <!-- TODO: Remove when we introduce metadata downloading by agent-->
               <directory>/var/lib/ambari-agent/cache/stacks</directory>
               <sources>
                 <source>
@@ -346,7 +345,7 @@
             </mapping>
             <mapping>
               <!-- custom actions root-->
-              <directory>/var/lib/ambari-agent/resources/custom_actions</directory>
+              <directory>/var/lib/ambari-agent/cache/custom_actions</directory>
               <filemode>755</filemode>
               <username>root</username>
               <groupname>root</groupname>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
index 942cc75..731ac54 100644
--- a/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
+++ b/ambari-agent/src/main/python/ambari_agent/ActionQueue.py
@@ -71,7 +71,9 @@ class ActionQueue(threading.Thread):
     self.sh = shellRunner()
     self._stop = threading.Event()
     self.tmpdir = config.get('agent', 'prefix')
-    self.customServiceOrchestrator = CustomServiceOrchestrator(config)
+    self.customServiceOrchestrator = CustomServiceOrchestrator(config,
+                                                               controller)
+
 
   def stop(self):
     self._stop.set()

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/src/main/python/ambari_agent/Controller.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/Controller.py b/ambari-agent/src/main/python/ambari_agent/Controller.py
index b842b4d..26985ef 100644
--- a/ambari-agent/src/main/python/ambari_agent/Controller.py
+++ b/ambari-agent/src/main/python/ambari_agent/Controller.py
@@ -54,7 +54,8 @@ class Controller(threading.Thread):
     self.credential = None
     self.config = config
     self.hostname = hostname.hostname()
-    server_secured_url = 'https://' + config.get('server', 'hostname') + ':' + config.get('server', 'secured_url_port')
+    server_secured_url = 'https://' + config.get('server', 'hostname') + \
+                         ':' + config.get('server', 'secured_url_port')
     self.registerUrl = server_secured_url + '/agent/v1/register/' + self.hostname
     self.heartbeatUrl = server_secured_url + '/agent/v1/heartbeat/' + self.hostname
     self.netutil = NetUtil()
@@ -67,14 +68,15 @@ class Controller(threading.Thread):
     # Event is used for synchronizing heartbeat iterations (to make possible
     # manual wait() interruption between heartbeats )
     self.heartbeat_wait_event = threading.Event()
+    # List of callbacks that are called at agent registration
+    self.registration_listeners = []
+
 
   def __del__(self):
     logger.info("Server connection disconnected.")
     pass
   
   def registerWithServer(self):
-    retry=False
-    firstTime=True
     id = -1
     ret = {}
 
@@ -257,8 +259,11 @@ class Controller(threading.Thread):
     message = registerResponse['response']
     logger.info("Response from server = " + message)
     if self.isRegistered:
-     time.sleep(self.netutil.HEARTBEAT_IDDLE_INTERVAL_SEC)
-     self.heartbeatWithServer()
+      # Process callbacks
+      for callback in self.registration_listeners:
+        callback()
+      time.sleep(self.netutil.HEARTBEAT_IDDLE_INTERVAL_SEC)
+      self.heartbeatWithServer()
 
   def restartAgent(self):
     os._exit(AGENT_AUTO_RESTART_EXIT_CODE)

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
index 95ad2cd..7436d26 100644
--- a/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
+++ b/ambari-agent/src/main/python/ambari_agent/CustomServiceOrchestrator.py
@@ -48,7 +48,7 @@ class CustomServiceOrchestrator():
   PRE_HOOK_PREFIX="before"
   POST_HOOK_PREFIX="after"
 
-  def __init__(self, config):
+  def __init__(self, config, controller):
     self.config = config
     self.tmp_dir = config.get('agent', 'prefix')
     self.file_cache = FileCache(config)
@@ -57,6 +57,8 @@ class CustomServiceOrchestrator():
                                                'status_command_stdout.txt')
     self.status_commands_stderr = os.path.join(self.tmp_dir,
                                                'status_command_stderr.txt')
+    # cache reset will be called on every agent registration
+    controller.registration_listeners.append(self.file_cache.reset)
     # Clean up old status command files if any
     try:
       os.unlink(self.status_commands_stdout)
@@ -72,15 +74,10 @@ class CustomServiceOrchestrator():
     command json, is ignored.
     """
     try:
-      try:
-        component_name = command['role']
-      except KeyError:
-        # For status commands and (maybe) custom actions component name
-        # is stored at another location
-        component_name = command['componentName']
       script_type = command['commandParams']['script_type']
       script = command['commandParams']['script']
       timeout = int(command['commandParams']['command_timeout'])
+      server_url_prefix = command['hostLevelParams']['jdk_location']
       task_id = "status"
       try:
         task_id = command['taskId']
@@ -92,15 +89,14 @@ class CustomServiceOrchestrator():
         command_name = forsed_command_name
 
       if command_name == self.CUSTOM_ACTION_COMMAND:
-        base_dir = self.config.get('python', 'custom_actions_dir')
+        base_dir = self.file_cache.get_custom_actions_base_dir(server_url_prefix)
         script_tuple = (os.path.join(base_dir, script) , base_dir)
         hook_dir = None
       else:
         if command_name == self.CUSTOM_COMMAND_COMMAND:
           command_name = command['hostLevelParams']['custom_command']
-        hook_dir = self.file_cache.get_hook_base_dir(command)
-        service_subpath = command['commandParams']['service_package_folder']
-        base_dir = self.file_cache.get_service_base_dir(service_subpath)
+        hook_dir = self.file_cache.get_hook_base_dir(command, server_url_prefix)
+        base_dir = self.file_cache.get_service_base_dir(command, server_url_prefix)
         script_path = self.resolve_script_path(base_dir, script, script_type)
         script_tuple = (script_path, base_dir)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/src/main/python/ambari_agent/FileCache.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/main/python/ambari_agent/FileCache.py b/ambari-agent/src/main/python/ambari_agent/FileCache.py
index 01d2e52..6b307f1 100644
--- a/ambari-agent/src/main/python/ambari_agent/FileCache.py
+++ b/ambari-agent/src/main/python/ambari_agent/FileCache.py
@@ -17,18 +17,19 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 '''
-
+import StringIO
 
 import logging
-import Queue
-import threading
-import pprint
 import os
-import json
-from AgentException import AgentException
+import shutil
+import zipfile
+import urllib2
 
 logger = logging.getLogger()
 
+class CachingException(Exception):
+  pass
+
 class FileCache():
   """
   Provides caching and lookup for service metadata files.
@@ -36,28 +37,40 @@ class FileCache():
   downloads relevant files from the server.
   """
 
+  STACKS_CACHE_DIRECTORY="stacks"
+  CUSTOM_ACTIONS_CACHE_DIRECTORY="custom_actions"
+  HASH_SUM_FILE=".hash"
+  ARCHIVE_NAME="archive.zip"
+
+  BLOCK_SIZE=1024*16
+  SOCKET_TIMEOUT=10
+
   def __init__(self, config):
     self.service_component_pool = {}
     self.config = config
     self.cache_dir = config.get('agent', 'cache_dir')
+    # Defines whether a command should fail when scripts can not be downloaded
+    # from the server, or whether the agent should fall back to the local copy
+    self.tolerate_download_failures = \
+          config.get('agent','tolerate_download_failures').lower() == 'true'
+    self.reset()
+
+
+  def reset(self):
+    self.uptodate_paths = [] # Paths that have already been checked recently
 
 
-  def get_service_base_dir(self, service_subpath):
+  def get_service_base_dir(self, command, server_url_prefix):
     """
     Returns a base directory for service
     """
-    service_base_dir = os.path.join(self.cache_dir, "stacks", service_subpath)
-    if not os.path.isdir(service_base_dir):
-      # TODO: Metadata downloading will be implemented at Phase 2
-      # As of now, all stack definitions are packaged and distributed with
-      # agent rpm
-      message = "Service base dir not found at expected location {0}".\
-        format(service_base_dir)
-      raise AgentException(message)
-    return service_base_dir
+    service_subpath = command['commandParams']['service_package_folder']
+    subpath = os.path.join(self.STACKS_CACHE_DIRECTORY, service_subpath)
+    return self.provide_directory(self.cache_dir, subpath,
+                                  server_url_prefix)
 
 
-  def get_hook_base_dir(self, command):
+  def get_hook_base_dir(self, command, server_url_prefix):
     """
     Returns a base directory for hooks
     """
@@ -65,13 +78,159 @@ class FileCache():
       hooks_subpath = command['commandParams']['hooks_folder']
     except KeyError:
       return None
-    hook_base_path = os.path.join(self.cache_dir, "stacks", hooks_subpath)
-    if not os.path.isdir(hook_base_path):
-      # TODO: Metadata downloading will be implemented at Phase 2
-      # As of now, all stack definitions are packaged and distributed with
-      # agent rpm
-      message = "Hook scripts dir for not found at " \
-                "expected location {0}".format(hook_base_path)
-      raise AgentException(message)
-    return hook_base_path
+    subpath = os.path.join(self.STACKS_CACHE_DIRECTORY, hooks_subpath)
+    return self.provide_directory(self.cache_dir, subpath,
+                                  server_url_prefix)
+
+
+  def get_custom_actions_base_dir(self, server_url_prefix):
+    """
+    Returns a base directory for custom action scripts
+    """
+    return self.provide_directory(self.cache_dir,
+                                  self.CUSTOM_ACTIONS_CACHE_DIRECTORY,
+                                  server_url_prefix)
+
 
+  def provide_directory(self, cache_path, subdirectory, server_url_prefix):
+    """
+    Ensures that the directory in the cache is up-to-date. Throws a
+    CachingException if any problem occurs.
+    Parameters:
+      cache_path: full path to the cache directory
+      subdirectory: subpath inside the cache
+      server_url_prefix: url of the "resources" folder at the server
+    """
+    full_path = os.path.join(cache_path, subdirectory)
+    logger.debug("Trying to provide directory {0}".format(subdirectory))
+    try:
+      if full_path not in self.uptodate_paths:
+        logger.debug("Checking if update is available for "
+                     "directory {0}".format(full_path))
+        # Need to check for updates at server
+        remote_url = self.build_download_url(server_url_prefix,
+                                             subdirectory, self.HASH_SUM_FILE)
+        memory_buffer = self.fetch_url(remote_url)
+        remote_hash = memory_buffer.getvalue().strip()
+        local_hash = self.read_hash_sum(full_path)
+        if not local_hash or local_hash != remote_hash:
+          logger.debug("Updating directory {0}".format(full_path))
+          download_url = self.build_download_url(server_url_prefix,
+                                                 subdirectory, self.ARCHIVE_NAME)
+          membuffer = self.fetch_url(download_url)
+          self.invalidate_directory(full_path)
+          self.unpack_archive(membuffer, full_path)
+          self.write_hash_sum(full_path, remote_hash)
+        # Finally consider cache directory up-to-date
+        self.uptodate_paths.append(full_path)
+    except CachingException, e:
+      if self.tolerate_download_failures:
+        # ignore
+        logger.warn("Error occured during cache update. "
+                    "Error tolerate setting is set to true, so"
+                    " ignoring this error and continuing with current cache. "
+                    "Error details: {0}".format(str(e)))
+      else:
+        raise # download failures are not tolerated, so command execution fails
+    return full_path
+
+
+  def build_download_url(self, server_url_prefix,
+                         directory, filename):
+    """
+    Builds a proper download URL for a file. Used for downloading files
+    from the server.
+    directory - relative path
+    filename - file inside the directory that we are trying to fetch
+    """
+    return "{0}/{1}/{2}".format(server_url_prefix, directory, filename)
+
+
+  def fetch_url(self, url):
+    """
+    Fetches the content at a url into an in-memory buffer and returns the
+    resulting buffer. May throw exceptions for various reasons
+    """
+    logger.debug("Trying to download {0}".format(url))
+    try:
+      memory_buffer = StringIO.StringIO()
+      u = urllib2.urlopen(url, timeout=self.SOCKET_TIMEOUT)
+      logger.debug("Connected with {0} with code {1}".format(u.geturl(),
+                                                             u.getcode()))
+      buff = u.read(self.BLOCK_SIZE)
+      while buff:
+        memory_buffer.write(buff)
+        buff = u.read(self.BLOCK_SIZE)
+      return memory_buffer
+    except Exception, err:
+      raise CachingException("Can not download file from"
+                             " url {0} : {1}".format(url, str(err)))
+
+
+  def read_hash_sum(self, directory):
+    """
+    Tries to read a hash sum from a previously generated file. Returns a
+    string containing the hash, or None
+    """
+    hash_file = os.path.join(directory, self.HASH_SUM_FILE)
+    try:
+      with open(hash_file) as fh:
+        return fh.readline().strip()
+    except Exception:
+      return None # A missing or unreadable hash file simply means "no hash yet"
+
+
+  def write_hash_sum(self, directory, new_hash):
+    """
+    Writes the given hash sum to a file in the directory. Raises a
+    CachingException if the file can not be written
+    """
+    hash_file = os.path.join(directory, self.HASH_SUM_FILE)
+    try:
+      with open(hash_file, "w") as fh:
+        fh.write(new_hash)
+    except Exception, err:
+      raise CachingException("Can not write to file {0} : {1}".format(hash_file,
+                                                                 str(err)))
+
+
+  def invalidate_directory(self, directory):
+    """
+    Recursively removes directory content (if any). Also, creates
+    directory and any parent directories if needed. May throw exceptions
+    on permission problems
+    """
+    logger.debug("Invalidating directory {0}".format(directory))
+    try:
+      if os.path.isfile(directory): # a plain file here would be unexpected
+        os.unlink(directory)
+      elif os.path.isdir(directory):
+        shutil.rmtree(directory)
+      # create directory itself and any parent directories
+      os.makedirs(directory)
+    except Exception, err:
+      raise CachingException("Can not invalidate cache directory {0}: {1}",
+                             directory, str(err))
+
+
+  def unpack_archive(self, mem_buffer, target_directory):
+    """
+    Unpacks contents of in-memory buffer to file system.
+    In-memory buffer is expected to contain a valid zip archive
+    """
+    try:
+      zfile = zipfile.ZipFile(mem_buffer)
+      for name in zfile.namelist():
+        (dirname, filename) = os.path.split(name)
+        concrete_dir=os.path.abspath(os.path.join(target_directory, dirname))
+        if not os.path.isdir(concrete_dir):
+          os.makedirs(concrete_dir)
+        logger.debug("Unpacking file {0} to {1}".format(name, concrete_dir))
+        zfile.extract(name, target_directory)
+    except Exception, err:
+      raise CachingException("Can not unpack zip file to "
+                             "directory {0} : {1}".format(
+                            target_directory, str(err)))
\ No newline at end of file
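
The whole refresh cycle implemented by provide_directory() condenses to:
compare the server's .hash with the local one, and on mismatch download
archive.zip, wipe the directory, unpack, and record the new hash. A compressed
stand-alone sketch (Python 2 stdlib, error handling omitted):

import os
import shutil
import zipfile
import urllib2
import StringIO

def refresh(cache_dir, server_url_prefix, subdir):
  remote_hash = urllib2.urlopen(
      "{0}/{1}/.hash".format(server_url_prefix, subdir)).read().strip()
  target = os.path.join(cache_dir, subdir)
  hash_file = os.path.join(target, ".hash")
  local_hash = None
  if os.path.isfile(hash_file):
    with open(hash_file) as fh:
      local_hash = fh.readline().strip()
  if local_hash != remote_hash:
    data = urllib2.urlopen(
        "{0}/{1}/archive.zip".format(server_url_prefix, subdir)).read()
    if os.path.isdir(target):
      shutil.rmtree(target)
    os.makedirs(target)
    zipfile.ZipFile(StringIO.StringIO(data)).extractall(target)
    with open(hash_file, "w") as fh:
      fh.write(remote_hash)
  return target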

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
index 1918641..abc6edc 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestActionQueue.py
@@ -143,8 +143,13 @@ class TestActionQueue(TestCase):
 
   @patch.object(ActionQueue, "process_command")
   @patch.object(Queue, "get")
-  def test_ActionQueueStartStop(self, get_mock, process_command_mock):
-    actionQueue = ActionQueue(AmbariConfig().getConfig(), 'dummy_controller')
+  @patch.object(CustomServiceOrchestrator, "__init__")
+  def test_ActionQueueStartStop(self, CustomServiceOrchestrator_mock,
+                                get_mock, process_command_mock):
+    CustomServiceOrchestrator_mock.return_value = None
+    dummy_controller = MagicMock()
+    config = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
     actionQueue.start()
     time.sleep(0.1)
     actionQueue.stop()
@@ -158,7 +163,8 @@ class TestActionQueue(TestCase):
   @patch.object(ActionQueue, "execute_status_command")
   def test_process_command(self, execute_status_command_mock,
                            execute_command_mock, print_exc_mock):
-    actionQueue = ActionQueue(AmbariConfig().getConfig(), 'dummy_controller')
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(AmbariConfig().getConfig(), dummy_controller)
     execution_command = {
       'commandType' : ActionQueue.EXECUTION_COMMAND,
     }
@@ -228,7 +234,10 @@ class TestActionQueue(TestCase):
     config = AmbariConfig().getConfig()
     tempdir = tempfile.gettempdir()
     config.set('agent', 'prefix', tempdir)
-    actionQueue = ActionQueue(config, 'dummy_controller')
+    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+    config.set('agent', 'tolerate_download_failures', "true")
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
     unfreeze_flag = threading.Event()
     puppet_execution_result_dict = {
       'stdout': 'out',
@@ -388,7 +397,10 @@ class TestActionQueue(TestCase):
     config = AmbariConfig().getConfig()
     tempdir = tempfile.gettempdir()
     config.set('agent', 'prefix', tempdir)
-    actionQueue = ActionQueue(config, 'dummy_controller')
+    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+    config.set('agent', 'tolerate_download_failures', "true")
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
     actionQueue.execute_command(self.datanode_restart_command)
     report = actionQueue.result()
     expected = {'actionId': '1-1',
@@ -414,11 +426,15 @@ class TestActionQueue(TestCase):
   @patch.object(CustomServiceOrchestrator, "requestComponentStatus")
   @patch.object(ActionQueue, "execute_command")
   @patch.object(LiveStatus, "build")
-  def test_execute_status_command(self, build_mock, execute_command_mock,
+  @patch.object(CustomServiceOrchestrator, "__init__")
+  def test_execute_status_command(self, CustomServiceOrchestrator_mock,
+                                  build_mock, execute_command_mock,
                                   requestComponentStatus_mock, read_stack_version_mock,
                                   determine_command_format_version_mock,
                                   status_update_callback):
-    actionQueue = ActionQueue(AmbariConfig().getConfig(), 'dummy_controller')
+    CustomServiceOrchestrator_mock.return_value = None
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(AmbariConfig().getConfig(), dummy_controller)
 
     build_mock.return_value = "dummy report"
     # Check execution ov V1 status command
@@ -441,7 +457,10 @@ class TestActionQueue(TestCase):
     self.assertTrue(requestComponentStatus_mock.called)
 
 
-  def test_determine_command_format_version(self):
+  @patch.object(CustomServiceOrchestrator, "__init__")
+  def test_determine_command_format_version(self,
+                                            CustomServiceOrchestrator_mock):
+    CustomServiceOrchestrator_mock.return_value = None
     v1_command = {
       'commandParams': {
         'schema_version': '1.0'
@@ -455,7 +474,8 @@ class TestActionQueue(TestCase):
     current_command = {
       # Absent 'commandParams' section
     }
-    actionQueue = ActionQueue(AmbariConfig().getConfig(), 'dummy_controller')
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(AmbariConfig().getConfig(), dummy_controller)
     self.assertEqual(actionQueue.determine_command_format_version(v1_command),
                      ActionQueue.COMMAND_FORMAT_V1)
     self.assertEqual(actionQueue.determine_command_format_version(v2_command),
@@ -469,12 +489,16 @@ class TestActionQueue(TestCase):
   @patch.object(PuppetExecutor, "runCommand")
   @patch.object(CustomServiceOrchestrator, "runCommand")
   @patch.object(ActionQueue, "status_update_callback")
+  @patch.object(CustomServiceOrchestrator, "__init__")
   def test_command_execution_depending_on_command_format(self,
+                                CustomServiceOrchestrator_mock,
                                 status_update_callback_mock,
                                 custom_ex_runCommand_mock,
                                 puppet_runCommand_mock, open_mock,
                                 determine_command_format_version_mock):
-    actionQueue = ActionQueue(AmbariConfig().getConfig(), 'dummy_controller')
+    CustomServiceOrchestrator_mock.return_value = None
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(AmbariConfig().getConfig(), dummy_controller)
     ret = {
       'stdout' : '',
       'stderr' : '',

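A pattern recurs throughout the hunks above: ActionQueue now takes a real controller object and constructs a CustomServiceOrchestrator internally, so the tests pass a MagicMock controller and neutralize CustomServiceOrchestrator.__init__. A minimal, self-contained sketch of that stubbing technique (Orchestrator and Queue are stand-ins, not the actual agent classes):

from mock import MagicMock, patch

class Orchestrator(object):
  def __init__(self, config, controller):
    raise RuntimeError("expensive setup we want to skip in unit tests")

class Queue(object):
  def __init__(self, config, controller):
    self.controller = controller
    self.orchestrator = Orchestrator(config, controller)

with patch.object(Orchestrator, "__init__") as init_mock:
  init_mock.return_value = None      # a patched __init__ must return None
  queue = Queue({}, MagicMock())     # no RuntimeError is raised
  assert queue.orchestrator is not None

Setting return_value = None matters here: a mocked __init__ that returns a MagicMock makes instantiation fail with a TypeError, which is why every patched test above assigns it explicitly.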
http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/src/test/python/ambari_agent/TestController.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestController.py b/ambari-agent/src/test/python/ambari_agent/TestController.py
index b4439e9..1e110da 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestController.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestController.py
@@ -209,7 +209,32 @@ class TestController(unittest.TestCase):
     heartbeatWithServer = MagicMock(name="heartbeatWithServer")
     self.controller.heartbeatWithServer = heartbeatWithServer
 
-    self.controller.isRegistered = True;
+    listener1 = MagicMock()
+    listener2 = MagicMock()
+    self.controller.registration_listeners.append(listener1)
+    self.controller.registration_listeners.append(listener2)
+    self.controller.isRegistered = True
+    self.controller.registerAndHeartbeat()
+    registerWithServer.assert_called_once_with()
+    heartbeatWithServer.assert_called_once_with()
+    self.assertTrue(listener1.called)
+    self.assertTrue(listener2.called)
+
+    self.controller.registerWithServer = \
+      Controller.Controller.registerWithServer
+    self.controller.heartbeatWithServer = \
+      Controller.Controller.heartbeatWithServer
+
+
+  @patch("time.sleep")
+  def test_registerAndHeartbeat_check_registration_listener(self, sleepMock):
+    registerWithServer = MagicMock(name="registerWithServer")
+    registerWithServer.return_value = {"response":"resp"}
+    self.controller.registerWithServer = registerWithServer
+    heartbeatWithServer = MagicMock(name="heartbeatWithServer")
+    self.controller.heartbeatWithServer = heartbeatWithServer
+
+    self.controller.isRegistered = True
     self.controller.registerAndHeartbeat()
     registerWithServer.assert_called_once_with()
     heartbeatWithServer.assert_called_once_with()

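The two tests above pin down the new Controller contract: every callable appended to registration_listeners fires once registration succeeds. A hedged sketch of that contract (MiniController is illustrative, not the real Controller code):

class MiniController(object):
  def __init__(self):
    self.registration_listeners = []

  def registerAndHeartbeat(self):
    response = {"response": "resp"}   # stands in for registerWithServer()
    for listener in self.registration_listeners:
      listener()                      # e.g. lets a cache drop stale state
    return response

calls = []
controller = MiniController()
controller.registration_listeners.append(lambda: calls.append("notified"))
controller.registerAndHeartbeat()
assert calls == ["notified"]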
http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
index 971048b..54c17a6 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestCustomServiceOrchestrator.py
@@ -56,11 +56,26 @@ class TestCustomServiceOrchestrator(TestCase):
     self.config.set('python', 'custom_actions_dir', tmpdir)
 
 
+  @patch.object(FileCache, "__init__")
+  def test_add_reg_listener_to_controller(self, FileCache_mock):
+    FileCache_mock.return_value = None
+    dummy_controller = MagicMock()
+    config = AmbariConfig().getConfig()
+    tempdir = tempfile.gettempdir()
+    config.set('agent', 'prefix', tempdir)
+    CustomServiceOrchestrator(config, dummy_controller)
+    self.assertTrue(dummy_controller.registration_listeners.append.called)
+
+
   @patch.object(manifestGenerator, 'decompressClusterHostInfo')
   @patch("hostname.public_hostname")
   @patch("os.path.isfile")
   @patch("os.unlink")
-  def test_dump_command_to_json(self, unlink_mock, isfile_mock, hostname_mock, decompress_cluster_host_info_mock):
+  @patch.object(FileCache, "__init__")
+  def test_dump_command_to_json(self, FileCache_mock, unlink_mock,
+                                isfile_mock, hostname_mock,
+                                decompress_cluster_host_info_mock):
+    FileCache_mock.return_value = None
     hostname_mock.return_value = "test.hst"
     command = {
       'commandType': 'EXECUTION_COMMAND',
@@ -86,7 +101,8 @@ class TestCustomServiceOrchestrator(TestCase):
     config = AmbariConfig().getConfig()
     tempdir = tempfile.gettempdir()
     config.set('agent', 'prefix', tempdir)
-    orchestrator = CustomServiceOrchestrator(config)
+    dummy_controller = MagicMock()
+    orchestrator = CustomServiceOrchestrator(config, dummy_controller)
     isfile_mock.return_value = True
     # Test dumping EXECUTION_COMMAND
     json_file = orchestrator.dump_command_to_json(command)
@@ -112,9 +128,12 @@ class TestCustomServiceOrchestrator(TestCase):
 
 
   @patch("os.path.exists")
-  def test_resolve_script_path(self, exists_mock):
+  @patch.object(FileCache, "__init__")
+  def test_resolve_script_path(self, FileCache_mock, exists_mock):
+    FileCache_mock.return_value = None
+    dummy_controller = MagicMock()
     config = AmbariConfig().getConfig()
-    orchestrator = CustomServiceOrchestrator(config)
+    orchestrator = CustomServiceOrchestrator(config, dummy_controller)
     # Testing existing path
     exists_mock.return_value = True
     path = orchestrator.\
@@ -136,14 +155,18 @@ class TestCustomServiceOrchestrator(TestCase):
   @patch.object(FileCache, "get_hook_base_dir")
   @patch.object(CustomServiceOrchestrator, "dump_command_to_json")
   @patch.object(PythonExecutor, "run_file")
-  def test_runCommand(self, run_file_mock, dump_command_to_json_mock,
+  @patch.object(FileCache, "__init__")
+  def test_runCommand(self, FileCache_mock,
+                      run_file_mock, dump_command_to_json_mock,
                       get_hook_base_dir_mock, get_service_base_dir_mock,
                       resolve_hook_script_path_mock, resolve_script_path_mock):
+    FileCache_mock.return_value = None
     command = {
       'role' : 'REGION_SERVER',
       'hostLevelParams' : {
         'stack_name' : 'HDP',
         'stack_version' : '2.0.7',
+        'jdk_location' : 'some_location'
       },
       'commandParams': {
         'script_type': 'PYTHON',
@@ -159,7 +182,8 @@ class TestCustomServiceOrchestrator(TestCase):
     resolve_hook_script_path_mock.return_value = \
       ('/hooks_dir/prefix-command/scripts/hook.py',
        '/hooks_dir/prefix-command')
-    orchestrator = CustomServiceOrchestrator(self.config)
+    dummy_controller = MagicMock()
+    orchestrator = CustomServiceOrchestrator(self.config, dummy_controller)
     get_hook_base_dir_mock.return_value = "/hooks/"
     # normal run case
     run_file_mock.return_value = {
@@ -206,10 +230,19 @@ class TestCustomServiceOrchestrator(TestCase):
 
   @patch.object(CustomServiceOrchestrator, "dump_command_to_json")
   @patch.object(PythonExecutor, "run_file")
-  def test_runCommand_custom_action(self, run_file_mock, dump_command_to_json_mock):
+  @patch.object(FileCache, "__init__")
+  @patch.object(FileCache, "get_custom_actions_base_dir")
+  def test_runCommand_custom_action(self, get_custom_actions_base_dir_mock,
+                                    FileCache_mock,
+                                    run_file_mock, dump_command_to_json_mock):
+    FileCache_mock.return_value = None
+    get_custom_actions_base_dir_mock.return_value = "some path"
     _, script = tempfile.mkstemp()
     command = {
       'role' : 'any',
+      'hostLevelParams' : {
+        'jdk_location' : 'some_location'
+      },
       'commandParams': {
         'script_type': 'PYTHON',
         'script': 'some_custom_action.py',
@@ -218,8 +251,8 @@ class TestCustomServiceOrchestrator(TestCase):
       'taskId' : '3',
       'roleCommand': 'ACTIONEXECUTE'
     }
-
-    orchestrator = CustomServiceOrchestrator(self.config)
+    dummy_controller = MagicMock()
+    orchestrator = CustomServiceOrchestrator(self.config, dummy_controller)
     # normal run case
     run_file_mock.return_value = {
       'stdout' : 'sss',
@@ -235,9 +268,11 @@ class TestCustomServiceOrchestrator(TestCase):
 
 
   @patch("os.path.isfile")
-  def test_resolve_hook_script_path(self, isfile_mock):
-
-    orchestrator = CustomServiceOrchestrator(self.config)
+  @patch.object(FileCache, "__init__")
+  def test_resolve_hook_script_path(self, FileCache_mock, isfile_mock):
+    FileCache_mock.return_value = None
+    dummy_controller = MagicMock()
+    orchestrator = CustomServiceOrchestrator(self.config, dummy_controller)
     # Testing None param
     res1 = orchestrator.resolve_hook_script_path(None, "prefix", "command",
                                             "script_type")
@@ -256,7 +291,9 @@ class TestCustomServiceOrchestrator(TestCase):
 
 
   @patch.object(CustomServiceOrchestrator, "runCommand")
-  def test_requestComponentStatus(self, runCommand_mock):
+  @patch.object(FileCache, "__init__")
+  def test_requestComponentStatus(self, FileCache_mock, runCommand_mock):
+    FileCache_mock.return_value = None
     status_command = {
       "serviceName" : 'HDFS',
       "commandType" : "STATUS_COMMAND",
@@ -264,7 +301,8 @@ class TestCustomServiceOrchestrator(TestCase):
       "componentName" : "DATANODE",
       'configurations':{}
     }
-    orchestrator = CustomServiceOrchestrator(self.config)
+    dummy_controller = MagicMock()
+    orchestrator = CustomServiceOrchestrator(self.config, dummy_controller)
     # Test alive case
     runCommand_mock.return_value = {
       "exitcode" : 0

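test_add_reg_listener_to_controller above implies the wiring on the orchestrator side: at construction time it hands a cache-reset callback to the controller, so stale "up to date" knowledge is discarded after every (re)registration. A sketch under that assumption (the Mini* names are illustrative):

class MiniFileCache(object):
  def __init__(self):
    self.uptodate_paths = []

  def reset(self):
    self.uptodate_paths = []

class MiniController(object):
  def __init__(self):
    self.registration_listeners = []

class MiniOrchestrator(object):
  def __init__(self, config, controller):
    self.file_cache = MiniFileCache()
    # register the reset callback, as the assertion on
    # dummy_controller.registration_listeners.append suggests
    controller.registration_listeners.append(self.file_cache.reset)

controller = MiniController()
MiniOrchestrator({}, controller)
assert len(controller.registration_listeners) == 1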
http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestFileCache.py b/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
index 5e389d5..023d19a 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestFileCache.py
@@ -28,15 +28,12 @@ import tempfile
 import time
 from threading import Thread
 
-from PythonExecutor import PythonExecutor
-from CustomServiceOrchestrator import CustomServiceOrchestrator
-from FileCache import FileCache
+from FileCache import FileCache, CachingException
 from AmbariConfig import AmbariConfig
 from mock.mock import MagicMock, patch
 import StringIO
 import sys
-from ambari_agent import AgentException
-from AgentException import AgentException
+import shutil
 
 
 class TestFileCache(TestCase):
@@ -51,56 +48,314 @@ class TestFileCache(TestCase):
     self.config.add_section('agent')
     self.config.set('agent', 'prefix', tmpdir)
     self.config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+    self.config.set('agent', 'tolerate_download_failures', "true")
 
 
-  @patch("os.path.isdir")
-  def test_get_service_base_dir(self, isdir_mock):
+  def test_reset(self):
     fileCache = FileCache(self.config)
-    # Check existing dir case
-    isdir_mock.return_value = True
-    service_subpath = "HDP/2.1.1/services/ZOOKEEPER/package"
-    base = fileCache.get_service_base_dir(service_subpath)
-    self.assertEqual(base, "/var/lib/ambari-agent/cache/stacks/HDP/2.1.1/"
-                           "services/ZOOKEEPER/package")
-    # Check absent dir case
-    isdir_mock.return_value = False
-    try:
-      fileCache.get_service_base_dir(service_subpath)
-      self.fail("Should throw an exception")
-    except AgentException:
-      pass # Expected
+    fileCache.uptodate_paths.append('dummy-path')
+    fileCache.reset()
+    self.assertFalse(fileCache.uptodate_paths)
 
 
+  @patch.object(FileCache, "provide_directory")
+  def test_get_service_base_dir(self, provide_directory_mock):
+    provide_directory_mock.return_value = "dummy value"
+    fileCache = FileCache(self.config)
+    command = {
+      'commandParams' : {
+        'service_package_folder' : 'HDP/2.1.1/services/ZOOKEEPER/package'
+      }
+    }
+    res = fileCache.get_service_base_dir(command, "server_url_pref")
+    self.assertEquals(
+      pprint.pformat(provide_directory_mock.call_args_list[0][0]),
+      "('/var/lib/ambari-agent/cache',\n "
+      "'stacks/HDP/2.1.1/services/ZOOKEEPER/package',\n"
+      " 'server_url_pref')")
+    self.assertEquals(res, "dummy value")
 
 
-  @patch("os.path.isdir")
-  def test_get_hook_base_dir(self, isdir_mock):
+  @patch.object(FileCache, "provide_directory")
+  def test_get_hook_base_dir(self, provide_directory_mock):
     fileCache = FileCache(self.config)
     # Check missing parameter
     command = {
       'commandParams' : {
       }
     }
-    base = fileCache.get_hook_base_dir(command)
+    base = fileCache.get_hook_base_dir(command, "server_url_pref")
     self.assertEqual(base, None)
+    self.assertFalse(provide_directory_mock.called)
 
     # Check existing dir case
-    isdir_mock.return_value = True
     command = {
       'commandParams' : {
         'hooks_folder' : 'HDP/2.1.1/hooks'
       }
     }
-    base = fileCache.get_hook_base_dir(command)
-    self.assertEqual(base, "/var/lib/ambari-agent/cache/stacks/HDP/2.1.1/hooks")
+    provide_directory_mock.return_value = "dummy value"
+    fileCache = FileCache(self.config)
+    res = fileCache.get_hook_base_dir(command, "server_url_pref")
+    self.assertEquals(
+      pprint.pformat(provide_directory_mock.call_args_list[0][0]),
+      "('/var/lib/ambari-agent/cache', "
+      "'stacks/HDP/2.1.1/hooks', "
+      "'server_url_pref')")
+    self.assertEquals(res, "dummy value")
+
+
+  @patch.object(FileCache, "provide_directory")
+  def test_get_custom_actions_base_dir(self, provide_directory_mock):
+    provide_directory_mock.return_value = "dummy value"
+    fileCache = FileCache(self.config)
+    res = fileCache.get_custom_actions_base_dir("server_url_pref")
+    self.assertEquals(
+      pprint.pformat(provide_directory_mock.call_args_list[0][0]),
+      "('/var/lib/ambari-agent/cache', 'custom_actions', 'server_url_pref')")
+    self.assertEquals(res, "dummy value")
+
+
+  @patch.object(FileCache, "build_download_url")
+  @patch.object(FileCache, "fetch_url")
+  @patch.object(FileCache, "read_hash_sum")
+  @patch.object(FileCache, "invalidate_directory")
+  @patch.object(FileCache, "unpack_archive")
+  @patch.object(FileCache, "write_hash_sum")
+  def test_provide_directory(self, write_hash_sum_mock, unpack_archive_mock,
+                             invalidate_directory_mock,
+                             read_hash_sum_mock, fetch_url_mock,
+                             build_download_url_mock):
+    build_download_url_mock.return_value = "http://dummy-url/"
+    HASH1 = "hash1"
+    membuffer = MagicMock()
+    membuffer.getvalue.return_value.strip.return_value = HASH1
+    fileCache = FileCache(self.config)
+
+    # uptodate_paths should be empty right after construction
+    self.assertFalse(fileCache.uptodate_paths)
+
+    # Test initial downloading (when dir does not exist)
+    fetch_url_mock.return_value = membuffer
+    read_hash_sum_mock.return_value = "hash2"
+    res = fileCache.provide_directory("cache_path", "subdirectory",
+                                      "server_url_prefix")
+    self.assertTrue(invalidate_directory_mock.called)
+    self.assertTrue(write_hash_sum_mock.called)
+    self.assertEquals(fetch_url_mock.call_count, 2)
+    self.assertEquals(pprint.pformat(fileCache.uptodate_paths),
+                     "['cache_path/subdirectory']")
+    self.assertEquals(res, 'cache_path/subdirectory')
+
+    fetch_url_mock.reset_mock()
+    write_hash_sum_mock.reset_mock()
+    invalidate_directory_mock.reset_mock()
+    unpack_archive_mock.reset_mock()
+
+    # Test that the cache is not invalidated when the local hash matches
+    fetch_url_mock.return_value = membuffer
+    read_hash_sum_mock.return_value = HASH1
+    fileCache.reset()
+
+    res = fileCache.provide_directory("cache_path", "subdirectory",
+                                      "server_url_prefix")
+    self.assertFalse(invalidate_directory_mock.called)
+    self.assertFalse(write_hash_sum_mock.called)
+    self.assertEquals(fetch_url_mock.call_count, 1)
+    self.assertEquals(pprint.pformat(fileCache.uptodate_paths),
+                      "['cache_path/subdirectory']")
+    self.assertEquals(res, 'cache_path/subdirectory')
+
+    fetch_url_mock.reset_mock()
+    write_hash_sum_mock.reset_mock()
+    invalidate_directory_mock.reset_mock()
+    unpack_archive_mock.reset_mock()
+
+    # Test execution path when path is up-to-date (already checked)
+    res = fileCache.provide_directory("cache_path", "subdirectory",
+                                      "server_url_prefix")
+    self.assertFalse(invalidate_directory_mock.called)
+    self.assertFalse(write_hash_sum_mock.called)
+    self.assertEquals(fetch_url_mock.call_count, 0)
+    self.assertEquals(pprint.pformat(fileCache.uptodate_paths),
+                      "['cache_path/subdirectory']")
+    self.assertEquals(res, 'cache_path/subdirectory')
+
+    # Check exception handling when tolerance is disabled
+    self.config.set('agent', 'tolerate_download_failures', "false")
+    fetch_url_mock.side_effect = self.caching_exc_side_effect
+    fileCache = FileCache(self.config)
+    try:
+      fileCache.provide_directory("cache_path", "subdirectory",
+                                  "server_url_prefix")
+      self.fail('CachingException not thrown')
+    except CachingException:
+      pass # Expected
+    except Exception, e:
+      self.fail('Unexpected exception thrown:' + str(e))
+
+    # Check that unexpected exceptions are still propagated when
+    # tolerance is enabled
+    self.config.set('agent', 'tolerate_download_failures', "true")
+    fetch_url_mock.side_effect = self.exc_side_effect
+    fileCache = FileCache(self.config)
+    try:
+      fileCache.provide_directory("cache_path", "subdirectory",
+                                  "server_url_prefix")
+      self.fail('Exception not thrown')
+    except Exception:
+      pass # Expected
 
-    # Check absent dir case
+
+    # Check exception handling when tolerance is enabled
+    self.config.set('agent', 'tolerate_download_failures', "true")
+    fetch_url_mock.side_effect = self.caching_exc_side_effect
+    fileCache = FileCache(self.config)
+    res = fileCache.provide_directory("cache_path", "subdirectory",
+                                  "server_url_prefix")
+    self.assertEquals(res, 'cache_path/subdirectory')
+
+
+  def test_build_download_url(self):
+    fileCache = FileCache(self.config)
+    url = fileCache.build_download_url('http://localhost:8080/resources/',
+                                       'stacks/HDP/2.1.1/hooks', 'archive.zip')
+    self.assertEqual(url,
+        'http://localhost:8080/resources//stacks/HDP/2.1.1/hooks/archive.zip')
+
+
+  @patch("urllib2.urlopen")
+  def test_fetch_url(self, urlopen_mock):
+    fileCache = FileCache(self.config)
+    remote_url = "http://dummy-url/"
+    # Test normal download
+    test_str = 'abc' * 100000 # Very long string
+    test_string_io = StringIO.StringIO(test_str)
+    test_buffer = MagicMock()
+    test_buffer.read.side_effect = test_string_io.read
+    urlopen_mock.return_value = test_buffer
+
+    memory_buffer = fileCache.fetch_url(remote_url)
+
+    self.assertEquals(memory_buffer.getvalue(), test_str)
+    self.assertEqual(test_buffer.read.call_count, 20) # depends on buffer size
+    # Test exception handling
+    test_buffer.read.side_effect = self.exc_side_effect
+    try:
+      fileCache.fetch_url(remote_url)
+      self.fail('CachingException not thrown')
+    except CachingException:
+      pass # Expected
+    except Exception, e:
+      self.fail('Unexpected exception thrown:' + str(e))
+
+
+  def test_read_write_hash_sum(self):
+    tmpdir = tempfile.mkdtemp()
+    dummyhash = "DUMMY_HASH"
+    fileCache = FileCache(self.config)
+    fileCache.write_hash_sum(tmpdir, dummyhash)
+    newhash = fileCache.read_hash_sum(tmpdir)
+    self.assertEquals(newhash, dummyhash)
+    shutil.rmtree(tmpdir)
+    # Test read of not existing file
+    newhash = fileCache.read_hash_sum(tmpdir)
+    self.assertEquals(newhash, None)
+    # Test write to not existing file
+    with patch("__builtin__.open") as open_mock:
+      open_mock.side_effect = self.exc_side_effect
+      try:
+        fileCache.write_hash_sum(tmpdir, dummyhash)
+        self.fail('CachingException not thrown')
+      except CachingException:
+        pass # Expected
+      except Exception, e:
+        self.fail('Unexpected exception thrown:' + str(e))
+
+
+  @patch("os.path.isfile")
+  @patch("os.path.isdir")
+  @patch("os.unlink")
+  @patch("shutil.rmtree")
+  @patch("os.makedirs")
+  def test_invalidate_directory(self, makedirs_mock, rmtree_mock,
+                                unlink_mock, isdir_mock, isfile_mock):
+    fileCache = FileCache(self.config)
+    # Test execution flow if path points to file
+    isfile_mock.return_value = True
     isdir_mock.return_value = False
+
+    fileCache.invalidate_directory("dummy-dir")
+
+    self.assertTrue(unlink_mock.called)
+    self.assertFalse(rmtree_mock.called)
+    self.assertTrue(makedirs_mock.called)
+
+    unlink_mock.reset_mock()
+    rmtree_mock.reset_mock()
+    makedirs_mock.reset_mock()
+
+    # Test execution flow if path points to dir
+    isfile_mock.return_value = False
+    isdir_mock.return_value = True
+
+    fileCache.invalidate_directory("dummy-dir")
+
+    self.assertFalse(unlink_mock.called)
+    self.assertTrue(rmtree_mock.called)
+    self.assertTrue(makedirs_mock.called)
+
+    unlink_mock.reset_mock()
+    rmtree_mock.reset_mock()
+    makedirs_mock.reset_mock()
+
+    # Test exception handling
+    makedirs_mock.side_effect = self.exc_side_effect
     try:
-      fileCache.get_hook_base_dir(command)
-      self.fail("Should throw an exception")
-    except AgentException:
+      fileCache.invalidate_directory("dummy-dir")
+      self.fail('CachingException not thrown')
+    except CachingException:
       pass # Expected
+    except Exception, e:
+      self.fail('Unexpected exception thrown:' + str(e))
+
+
+  def test_unpack_archive(self):
+    tmpdir = tempfile.mkdtemp()
+    dummy_archive = os.path.join("ambari_agent", "dummy_files",
+                                 "dummy_archive.zip")
+    # Test normal flow
+    with open(dummy_archive, "r") as f:
+      data = f.read(os.path.getsize(dummy_archive))
+      membuf = StringIO.StringIO(data)
+
+    fileCache = FileCache(self.config)
+    fileCache.unpack_archive(membuf, tmpdir)
+    # Count the total size and number of unpacked files and dirs:
+    total_size = 0
+    total_files = 0
+    total_dirs = 0
+    for dirpath, dirnames, filenames in os.walk(tmpdir):
+      total_dirs += 1
+      for f in filenames:
+        fp = os.path.join(dirpath, f)
+        total_size += os.path.getsize(fp)
+        total_files += 1
+    self.assertEquals(total_size, 51258L)
+    self.assertEquals(total_files, 28)
+    self.assertEquals(total_dirs, 8)
+    shutil.rmtree(tmpdir)
+
+    # Test exception handling
+    with patch("os.path.isdir") as isdir_mock:
+      isdir_mock.side_effect = self.exc_side_effect
+      try:
+        fileCache.unpack_archive(membuf, tmpdir)
+        self.fail('CachingException not thrown')
+      except CachingException:
+        pass # Expected
+      except Exception, e:
+        self.fail('Unexpected exception thrown:' + str(e))
 
 
   def tearDown(self):
@@ -108,3 +363,8 @@ class TestFileCache(TestCase):
     sys.stdout = sys.__stdout__
 
 
+  def exc_side_effect(self, *a):
+    raise Exception("horrible_exc")
+
+  def caching_exc_side_effect(self, *a):
+    raise CachingException("horrible_caching_exc")
\ No newline at end of file

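Taken together, the provide_directory assertions above fix the download flow: a cache miss costs two fetches (the remote .hash, then the archive); a hash match costs one (.hash only); a path already recorded in uptodate_paths costs none; and a CachingException is swallowed only when tolerate_download_failures is on, while other exceptions always propagate. A runnable sketch of that flow, with stand-in helpers in place of the real HTTP and zip code:

import os

class CachingException(Exception):
  pass

class MiniFileCache(object):
  def __init__(self, tolerate_download_failures):
    self.tolerate = tolerate_download_failures
    self.uptodate_paths = []
    self.local_hash = None       # stands in for the on-disk .hash file
    self.fetches = 0

  def fetch_hash(self):          # stand-in for fetch_url(<subdir>/.hash)
    self.fetches += 1
    return "hash1"

  def fetch_and_unpack(self, path):  # stand-in for fetch_url + unpack_archive
    self.fetches += 1

  def provide_directory(self, cache_path, subdir):
    full_path = os.path.join(cache_path, subdir)
    try:
      if full_path not in self.uptodate_paths:
        remote_hash = self.fetch_hash()
        if self.local_hash != remote_hash:
          self.fetch_and_unpack(full_path)   # invalidate + download + unpack
          self.local_hash = remote_hash      # write_hash_sum
        self.uptodate_paths.append(full_path)
    except CachingException:
      if not self.tolerate:
        raise              # non-CachingExceptions are never caught here
    return full_path

cache = MiniFileCache(tolerate_download_failures=True)
cache.provide_directory("cache_path", "subdirectory")  # miss: 2 fetches
cache.provide_directory("cache_path", "subdirectory")  # up to date: 0 fetches
assert cache.fetches == 2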
http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/src/test/python/ambari_agent/TestHeartbeat.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/TestHeartbeat.py b/ambari-agent/src/test/python/ambari_agent/TestHeartbeat.py
index 906244d..c6a834d 100644
--- a/ambari-agent/src/test/python/ambari_agent/TestHeartbeat.py
+++ b/ambari-agent/src/test/python/ambari_agent/TestHeartbeat.py
@@ -47,7 +47,12 @@ class TestHeartbeat(TestCase):
 
 
   def test_build(self):
-    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig(),'dummy_controller')
+    config = AmbariConfig.AmbariConfig().getConfig()
+    config.set('agent', 'prefix', 'tmp')
+    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+    config.set('agent', 'tolerate_download_failures', "true")
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
     heartbeat = Heartbeat(actionQueue)
     result = heartbeat.build(100)
     print "Heartbeat: " + str(result)
@@ -80,7 +85,12 @@ class TestHeartbeat(TestCase):
                    'exitCode': 777}],
       'componentStatus': [{'status': 'HEALTHY', 'componentName': 'NAMENODE'}]
     }
-    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig(),'dummy_controller')
+    config = AmbariConfig.AmbariConfig().getConfig()
+    config.set('agent', 'prefix', 'tmp')
+    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+    config.set('agent', 'tolerate_download_failures', "true")
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
     heartbeat = Heartbeat(actionQueue)
     hb = heartbeat.build(id = 10, state_interval=1, componentsMapped=True)
     self.assertEqual(register_mock.call_args_list[0][0][1], True)
@@ -92,7 +102,12 @@ class TestHeartbeat(TestCase):
 
   @patch.object(ActionQueue, "result")
   def test_build_long_result(self, result_mock):
-    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig(),'dummy_controller')
+    config = AmbariConfig.AmbariConfig().getConfig()
+    config.set('agent', 'prefix', 'tmp')
+    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+    config.set('agent', 'tolerate_download_failures', "true")
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
     result_mock.return_value = {
       'reports': [{'status': 'IN_PROGRESS',
             'stderr': 'Read from /tmp/errors-3.txt',
@@ -178,7 +193,12 @@ class TestHeartbeat(TestCase):
 
   @patch.object(HostInfo, 'register')
   def test_heartbeat_no_host_check_cmd_in_queue(self, register_mock):
-    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig(),'dummy_controller')
+    config = AmbariConfig.AmbariConfig().getConfig()
+    config.set('agent', 'prefix', 'tmp')
+    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+    config.set('agent', 'tolerate_download_failures', "true")
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
     statusCommand = {
       "serviceName" : 'HDFS',
       "commandType" : "STATUS_COMMAND",
@@ -198,7 +218,12 @@ class TestHeartbeat(TestCase):
 
   @patch.object(HostInfo, 'register')
   def test_heartbeat_host_check_no_cmd(self, register_mock):
-    actionQueue = ActionQueue(AmbariConfig.AmbariConfig().getConfig(),'dummy_controller')
+    config = AmbariConfig.AmbariConfig().getConfig()
+    config.set('agent', 'prefix', 'tmp')
+    config.set('agent', 'cache_dir', "/var/lib/ambari-agent/cache")
+    config.set('agent', 'tolerate_download_failures', "true")
+    dummy_controller = MagicMock()
+    actionQueue = ActionQueue(config, dummy_controller)
     heartbeat = Heartbeat(actionQueue)
     heartbeat.build(12, 6)
     self.assertTrue(register_mock.called)

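Every heartbeat test above now repeats the same four configuration lines; these are the agent keys the new ActionQueue constructor depends on. As a standalone sketch (values are the test defaults):

import ConfigParser

config = ConfigParser.RawConfigParser()
config.add_section('agent')
config.set('agent', 'prefix', 'tmp')
config.set('agent', 'cache_dir', '/var/lib/ambari-agent/cache')
config.set('agent', 'tolerate_download_failures', 'true')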
http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-agent/src/test/python/ambari_agent/dummy_files/dummy_archive.zip
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/ambari_agent/dummy_files/dummy_archive.zip b/ambari-agent/src/test/python/ambari_agent/dummy_files/dummy_archive.zip
new file mode 100644
index 0000000..9a77d7e
Binary files /dev/null and b/ambari-agent/src/test/python/ambari_agent/dummy_files/dummy_archive.zip differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-server/pom.xml b/ambari-server/pom.xml
index 1d6b427..bc83dd8 100644
--- a/ambari-server/pom.xml
+++ b/ambari-server/pom.xml
@@ -327,7 +327,6 @@
                   <location>${project.build.directory}/DBConnectionVerification.jar</location>
                 </source>
                 <source>
-                  <!-- This file is also included into agent rpm -->
                   <location>src/main/resources/role_command_order.json</location>
                 </source>
               </sources>
@@ -390,6 +389,9 @@
               <groupname>root</groupname>
               <sources>
                 <source>
+                  <location>src/main/python/ambari_server</location>
+                </source>
+                <source>
                   <location>src/main/python/bootstrap.py</location>
                 </source>
                 <source>
@@ -420,6 +422,18 @@
                 </source>
               </sources>
             </mapping>
+            <mapping>
+              <!-- custom actions root-->
+              <directory>/var/lib/ambari-server/resources/custom_actions</directory>
+              <filemode>755</filemode>
+              <username>root</username>
+              <groupname>root</groupname>
+              <sources>
+                <source>
+                  <location>src/main/resources/custom_actions</location>
+                </source>
+              </sources>
+            </mapping>
           </mappings>
         </configuration>
       </plugin>

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
index 2babd6b..a339784 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/HeartbeatMonitor.java
@@ -28,6 +28,7 @@ import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.actionmanager.ActionManager;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
+import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.state.*;
 import org.apache.ambari.server.state.fsm.InvalidStateTransitionException;
 import org.apache.ambari.server.state.host.HostHeartbeatLostEvent;
@@ -49,6 +50,7 @@ public class HeartbeatMonitor implements Runnable {
   private Thread monitorThread = null;
   private final ConfigHelper configHelper;
   private final AmbariMetaInfo ambariMetaInfo;
+  private final AmbariManagementController ambariManagementController;
   private final Configuration configuration;
 
   public HeartbeatMonitor(Clusters clusters, ActionQueue aq, ActionManager am,
@@ -59,6 +61,8 @@ public class HeartbeatMonitor implements Runnable {
     this.threadWakeupInterval = threadWakeupInterval;
     this.configHelper = injector.getInstance(ConfigHelper.class);
     this.ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
+    this.ambariManagementController = injector.getInstance(
+            AmbariManagementController.class);
     this.configuration = injector.getInstance(Configuration.class);
   }
 
@@ -191,7 +195,6 @@ public class HeartbeatMonitor implements Runnable {
     String serviceName = sch.getServiceName();
     String componentName = sch.getServiceComponentName();
     Service service = cluster.getService(sch.getServiceName());
-    ServiceComponent sc = service.getServiceComponent(componentName);
     StackId stackId = cluster.getDesiredStackVersion();
     ServiceInfo serviceInfo = ambariMetaInfo.getServiceInfo(stackId.getStackName(),
             stackId.getStackVersion(), serviceName);
@@ -267,6 +270,7 @@ public class HeartbeatMonitor implements Runnable {
     commandParams.put(HOOKS_FOLDER, stackInfo.getStackHooksFolder());
     // Fill host level params
     Map<String, String> hostLevelParams = statusCmd.getHostLevelParams();
+    hostLevelParams.put(JDK_LOCATION, ambariManagementController.getJdkResourceUrl());
     hostLevelParams.put(STACK_NAME, stackId.getStackName());
     hostLevelParams.put(STACK_VERSION, stackId.getStackVersion());
 

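This is the server-side half of the handshake: HeartbeatMonitor now places the server's resource URL into hostLevelParams under JDK_LOCATION, and the agent reuses that value as the URL prefix for cache downloads. A hedged sketch of how prefix, subdirectory and archive name combine, matching the test_build_download_url expectation shown earlier (the naive join reproduces the double slash):

def build_download_url(server_url_prefix, directory, filename):
  return "{0}/{1}/{2}".format(server_url_prefix, directory, filename)

url = build_download_url('http://localhost:8080/resources/',
                         'stacks/HDP/2.1.1/hooks', 'archive.zip')
assert url == ('http://localhost:8080/resources/'
               '/stacks/HDP/2.1.1/hooks/archive.zip')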
http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index 7f2d1fb..be800e7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -125,8 +125,8 @@ public class AmbariCustomCommandExecutionHelper {
   @Inject
   private ConfigHelper configHelper;
 
-  private Boolean isServiceCheckCommand(String
-                                          command, String service) {
+
+  private Boolean isServiceCheckCommand(String command, String service) {
     List<String> actions = actionMetadata.getActions(service);
     if (actions == null || actions.size() == 0) {
       return false;

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index e4ebc1c..b9f62e4 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -322,9 +322,7 @@ public class AmbariServer {
       resources.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
           "com.sun.jersey.api.core.PackagesResourceConfig");
       resources.setInitParameter("com.sun.jersey.config.property.packages",
-          "org.apache.ambari.server.resources.api.rest;" + "org.apache.ambari.server.api");
-      resources.setInitParameter("com.sun.jersey.api.json.POJOMappingFeature",
-          "true");
+          "org.apache.ambari.server.resources.api.rest;");
       root.addServlet(resources, "/resources/*");
       resources.setInitOrder(6);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/main/python/ambari-server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari-server.py b/ambari-server/src/main/python/ambari-server.py
index 61233d3..e3439e0 100755
--- a/ambari-server/src/main/python/ambari-server.py
+++ b/ambari-server/src/main/python/ambari-server.py
@@ -39,6 +39,7 @@ import datetime
 import tempfile
 import random
 import pwd
+from ambari_server.resourceFilesKeeper import ResourceFilesKeeper, KeeperException
 
 # debug settings
 VERBOSE = False
@@ -593,6 +594,8 @@ NR_ADJUST_OWNERSHIP_LIST =[
   ( "/var/lib/ambari-server/keys/db", "700", "{0}", False ),
   ( "/var/lib/ambari-server/keys/db/newcerts", "700", "{0}", False ),
   ( "/var/lib/ambari-server/keys/.ssh", "700", "{0}", False ),
+  ( "/var/lib/ambari-server/resources/stacks/", "755", "{0}", True ),
+  ( "/var/lib/ambari-server/resources/custom_actions/", "755", "{0}", True ),
   ( "/etc/ambari-server/conf", "644", "{0}", True ),
   ( "/etc/ambari-server/conf", "755", "{0}", False ),
   ( "/etc/ambari-server/conf/password.dat", "640", "{0}", False ),
@@ -2550,6 +2553,20 @@ def start(args):
       print "Please do not forget to start PostgreSQL server."
 
   properties = get_ambari_properties()
+  stack_location = get_stack_location(properties)
+  # Hack: we determine the resources dir as the parent dir of stack_location
+  resources_location = os.path.dirname(stack_location)
+  resource_files_keeper = ResourceFilesKeeper(resources_location, verbose=VERBOSE)
+
+  try:
+    print "Organizing resource files at {0}...".format(resources_location,
+                                                       verbose=VERBOSE)
+    resource_files_keeper.perform_housekeeping()
+  except KeeperException, ex:
+    msg = "Can not organize resource files at {0}: {1}".format(
+                                                resources_location, str(ex))
+    raise FatalException(-1, msg)
+
   isSecure = get_is_secure(properties)
   (isPersisted, masterKeyFile) = get_is_persisted(properties)
   environ = os.environ.copy()
@@ -2804,16 +2821,20 @@ def upgrade_local_repo_db(args, dbkey, dbvalue):
     return retcode
   pass
 
+
+def get_stack_location(properties):
+  stack_location = properties[STACK_LOCATION_KEY]
+  if stack_location is None:
+    stack_location = STACK_LOCATION_DEFAULT
+  return stack_location
+
 def upgrade_local_repo(args):
   properties = get_ambari_properties()
   if properties == -1:
     print_error_msg ("Error getting ambari properties")
     return -1
 
-  stack_location = properties[STACK_LOCATION_KEY]
-  if stack_location is None:
-    stack_location = STACK_LOCATION_DEFAULT
-
+  stack_location = get_stack_location(properties)
   stack_root_local = os.path.join(stack_location, "HDPLocal")
   if not os.path.exists(stack_root_local):
     print_info_msg("HDPLocal stack directory does not exist, skipping")

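The "hack" noted in start() in one line: the resources directory is taken to be the parent of the configured stack location. Assuming the default layout seen in NR_ADJUST_OWNERSHIP_LIST above (no trailing slash):

import os

stack_location = "/var/lib/ambari-server/resources/stacks"
resources_location = os.path.dirname(stack_location)
assert resources_location == "/var/lib/ambari-server/resources"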
http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/main/python/ambari_server/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/__init__.py b/ambari-server/src/main/python/ambari_server/__init__.py
new file mode 100644
index 0000000..16818c9
--- /dev/null
+++ b/ambari-server/src/main/python/ambari_server/__init__.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/main/python/ambari_server/resourceFilesKeeper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/python/ambari_server/resourceFilesKeeper.py b/ambari-server/src/main/python/ambari_server/resourceFilesKeeper.py
new file mode 100644
index 0000000..92cab3d
--- /dev/null
+++ b/ambari-server/src/main/python/ambari_server/resourceFilesKeeper.py
@@ -0,0 +1,258 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+import hashlib
+
+import os, sys
+import zipfile
+import glob
+import pprint
+from xml.dom import minidom
+
+
+class KeeperException(Exception):
+  pass
+
+class ResourceFilesKeeper():
+  """
+  This class encapsulates all utility methods for resource file maintenance.
+  """
+
+  HOOKS_DIR="hooks"
+  PACKAGE_DIR="package"
+  STACKS_DIR="stacks"
+  CUSTOM_ACTIONS_DIR="custom_actions"
+
+  # For these directories archives are created
+  ARCHIVABLE_DIRS = [HOOKS_DIR, PACKAGE_DIR]
+
+  HASH_SUM_FILE=".hash"
+  ARCHIVE_NAME="archive.zip"
+
+  PYC_EXT=".pyc"
+  METAINFO_XML = "metainfo.xml"
+
+  BUFFER = 1024 * 32
+
+  # Change this to True to see debug output on stderr
+  DEBUG=False
+
+  def __init__(self, resources_dir, verbose=False):
+    self.resources_dir = resources_dir
+    self.verbose = verbose
+
+
+  def perform_housekeeping(self):
+    """
+    Performs housekeeping operations on resource files
+    """
+    self.update_directory_archieves()
+    # more housekeeping operations may be added here later
+
+
+  def update_directory_archieves(self):
+    """
+    Please see AMBARI-4481 for more details
+    """
+    stacks_root = os.path.join(self.resources_dir, self.STACKS_DIR)
+    self.dbg_out("Updating archives for stack dirs at {0}...".format(stacks_root))
+    active_stacks = self.list_active_stacks(stacks_root)
+    self.dbg_out("Active stacks: {0}".format(pprint.pformat(active_stacks)))
+    # Iterate over active stack directories
+    for stack_dir in active_stacks:
+      for root, dirs, _ in os.walk(stack_dir):
+        for d in dirs:
+          if d in self.ARCHIVABLE_DIRS:
+            full_path = os.path.abspath(os.path.join(root, d))
+            self.update_directory_archive(full_path)
+
+
+    custom_actions_root = os.path.join(self.resources_dir,
+                                       self.CUSTOM_ACTIONS_DIR)
+    self.dbg_out("Updating archive for custom_actions dir at {0}...".format(
+                                       custom_actions_root))
+    self.update_directory_archive(custom_actions_root)
+
+
+
+  def list_active_stacks(self, stacks_root):
+    """
+    Builds a list of stack directories that are active (enabled)
+    """
+    active_stacks = [] # list of active stack directory paths
+    glob_pattern = "{0}/*/*".format(stacks_root)
+    try:
+      stack_dirs = glob.glob(glob_pattern)
+      for directory in stack_dirs:
+        metainfo_file = os.path.join(directory, self.METAINFO_XML)
+        if os.path.exists(metainfo_file) and self.is_active_stack(metainfo_file):
+          active_stacks.append(directory)
+      return active_stacks
+    except Exception, err:
+      raise KeeperException("Can not list active stacks: {0}".format(str(err)))
+
+
+  def update_directory_archive(self, directory):
+    """
+    If hash sum for directory is not present or differs from saved value,
+    recalculates hash sum and creates directory archive
+    """
+    cur_hash = self.count_hash_sum(directory)
+    saved_hash = self.read_hash_sum(directory)
+    if cur_hash != saved_hash:
+      self.zip_directory(directory)
+      self.write_hash_sum(directory, cur_hash)
+
+
+  def count_hash_sum(self, directory):
+    """
+    Recursively computes a hash sum over all files in the directory and its
+    subdirectories. Files are processed in alphabetical order. Previously
+    created directory archives, saved hash files and compiled .pyc files
+    are ignored.
+    """
+    try:
+      sha1 = hashlib.sha1()
+      file_list = []
+      for root, dirs, files in os.walk(directory):
+        for f in files:
+          if not self.is_ignored(f):
+            full_path = os.path.abspath(os.path.join(root, f))
+            file_list.append(full_path)
+      file_list.sort()
+      for path in file_list:
+        self.dbg_out("Counting hash of {0}".format(path))
+        with open(path, 'rb') as fh:
+          while True:
+            data = fh.read(self.BUFFER)
+            if not data:
+              break
+            sha1.update(data)
+      return sha1.hexdigest()
+    except Exception, err:
+      raise KeeperException("Can not calculate directory "
+                            "hash: {0}".format(str(err)))
+
+
+  def read_hash_sum(self, directory):
+    """
+    Tries to read a hash sum from previously generated file. Returns string
+    containing hash or None
+    """
+    hash_file = os.path.join(directory, self.HASH_SUM_FILE)
+    if os.path.isfile(hash_file):
+      try:
+        with open(hash_file) as fh:
+          return fh.readline().strip()
+      except Exception, err:
+        raise KeeperException("Can not read file {0} : {1}".format(hash_file,
+                                                                   str(err)))
+    else:
+      return None
+
+
+  def write_hash_sum(self, directory, new_hash):
+    """
+    Saves the given hash sum to a file in the directory so it can be
+    compared on subsequent runs
+    """
+    hash_file = os.path.join(directory, self.HASH_SUM_FILE)
+    try:
+      with open(hash_file, "w") as fh:
+        fh.write(new_hash)
+    except Exception, err:
+      raise KeeperException("Can not write to file {0} : {1}".format(hash_file,
+                                                                   str(err)))
+
+
+  def zip_directory(self, directory):
+    """
+    Packs the entire directory into a zip file. The hash file, previous
+    archives and compiled .pyc files are excluded
+    """
+    self.dbg_out("creating archive for directory {0}".format(directory))
+    try:
+      zf = zipfile.ZipFile(os.path.join(directory, self.ARCHIVE_NAME), "w")
+      abs_src = os.path.abspath(directory)
+      for root, dirs, files in os.walk(directory):
+        for filename in files:
+          # Avoid zipping the previous archive, the hash file and .pyc files
+          if not self.is_ignored(filename):
+            absname = os.path.abspath(os.path.join(root, filename))
+            arcname = absname[len(abs_src) + 1:]
+            self.dbg_out('zipping %s as %s' % (os.path.join(root, filename),
+                                        arcname))
+            zf.write(absname, arcname)
+      zf.close()
+    except Exception, err:
+      raise KeeperException("Can not create zip archive of "
+                            "directory {0} : {1}".format(directory, str(err)))
+
+
+  def is_ignored(self, filename):
+    """
+    returns True if filename is ignored when calculating hashing or archiving
+    """
+    return filename in [self.HASH_SUM_FILE, self.ARCHIVE_NAME] or \
+           filename.endswith(self.PYC_EXT)
+
+
+  def dbg_out(self, text):
+    if self.DEBUG:
+      sys.stderr.write("{0}\n".format(text))
+    if not self.DEBUG and self.verbose:
+      print text
+
+
+  def is_active_stack(self, xmlfile):
+    try:
+      xmldoc = minidom.parse(xmlfile)
+      value = self.xpath_like_bycicle(xmldoc, ['metainfo', 'versions', 'active'])
+      return value.lower().strip() == 'true'
+    except Exception, err:
+      raise KeeperException("Can not parse XML file {0} : {1}",
+                            xmlfile, str(err))
+
+
+  def xpath_like_bycicle(self, xml_doc, path):
+    # The default Python 2.6 distribution has no good XPath support,
+    # so we implement our own bicycle here
+    cur_elem = xml_doc
+    for name in path:
+      elem = self.find_xml_element_by_name(cur_elem._get_childNodes(), name)
+      if elem is not None:
+        cur_elem = elem
+      else:
+        return None
+    # Select text in tags
+    value = cur_elem._get_childNodes()[0].nodeValue
+    if value:
+      return value.lower().strip()
+    else:
+      return None
+
+
+  def find_xml_element_by_name(self, elements, element_name):
+    for xml_element in elements:
+      if xml_element.nodeType == xml_element.ELEMENT_NODE and \
+                      xml_element.nodeName == element_name:
+        return xml_element
+    return None
+
+

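A usage sketch for the new class: pointed at a resources directory, the keeper re-zips the hooks/ and package/ dirs of every active stack (plus custom_actions) whose content hash changed since the last run. The path below is illustrative:

from ambari_server.resourceFilesKeeper import ResourceFilesKeeper, KeeperException

try:
  keeper = ResourceFilesKeeper("/var/lib/ambari-server/resources", verbose=True)
  keeper.perform_housekeeping()   # refreshes archive.zip and .hash where stale
except KeeperException, e:
  print "Can not organize resource files: " + str(e)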
http://git-wip-us.apache.org/repos/asf/ambari/blob/02f9c453/ambari-server/src/test/python/TestAmbariServer.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/TestAmbariServer.py b/ambari-server/src/test/python/TestAmbariServer.py
index 43494a7..3075391 100644
--- a/ambari-server/src/test/python/TestAmbariServer.py
+++ b/ambari-server/src/test/python/TestAmbariServer.py
@@ -29,6 +29,7 @@ import stat
 import datetime
 import operator
 from pwd import getpwnam
+from ambari_server.resourceFilesKeeper import ResourceFilesKeeper, KeeperException
 
 # We have to use this import HACK because the filename contains a dash
 ambari_server = __import__('ambari-server')
@@ -2455,7 +2456,9 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
   @patch.object(ambari_server, "find_jdbc_driver")
   @patch("getpass.getuser")
   @patch("os.chdir")
-  def test_start(self, chdir_mock, getuser_mock, find_jdbc_driver_mock, is_root_mock, read_ambari_user_mock,
+  @patch.object(ResourceFilesKeeper, "perform_housekeeping")
+  def test_start(self, perform_housekeeping_mock, chdir_mock, getuser_mock,
+                 find_jdbc_driver_mock, is_root_mock, read_ambari_user_mock,
                  parse_properties_file_mock, check_postgre_up_mock,
                  print_error_msg_mock, find_jdk_mock, search_file_mock,
                  print_info_msg_mock, popenMock, openMock, pexistsMock,
@@ -2556,6 +2559,18 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
       # Ignored
       pass
 
+    # Test exception handling on resource files housekeeping
+    perform_housekeeping_mock.reset_mock()
+    perform_housekeeping_mock.side_effect = KeeperException("some_reason")
+    try:
+      ambari_server.start(args)
+      self.fail("Should fail with exception")
+    except FatalException as e:
+      self.assertTrue('some_reason' in e.reason)
+    self.assertTrue(perform_housekeeping_mock.called)
+    perform_housekeeping_mock.side_effect = lambda *v, **kv : None
+    perform_housekeeping_mock.reset_mock()
+
     self.assertFalse('Unable to start PostgreSQL server' in e.reason)
     self.assertFalse(check_postgre_up_mock.called)
 
@@ -2585,6 +2600,8 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     self.assertTrue(popenMock.called)
     popen_arg = popenMock.call_args[0][0]
     self.assertTrue(popen_arg[0] == "/bin/sh")
+    self.assertTrue(perform_housekeeping_mock.called)
+    perform_housekeeping_mock.reset_mock()
     popenMock.reset_mock()
 
     parse_properties_file_mock.reset_mock()
@@ -2596,6 +2613,7 @@ MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV
     self.assertTrue(popenMock.called)
     popen_arg = popenMock.call_args[0][0]
     self.assertTrue(popen_arg[0] == "/bin/su")
+    self.assertTrue(perform_housekeeping_mock.called)
     check_postgre_up_mock.reset_mock()
 
     popenMock.reset_mock()
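Finally, the error-path contract that the new test_start assertions encode: a housekeeping failure surfaces as a FatalException whose reason embeds the KeeperException message. A sketch with stand-in classes, mirroring the start() hunk in ambari-server.py above:

class KeeperException(Exception):
  pass

class FatalException(Exception):
  def __init__(self, code, reason):
    self.code = code
    self.reason = reason

def organize_resources(keeper, resources_location):
  try:
    keeper.perform_housekeeping()
  except KeeperException, ex:
    msg = "Can not organize resource files at {0}: {1}".format(
        resources_location, str(ex))
    raise FatalException(-1, msg)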