Posted to commits@ambari.apache.org by sm...@apache.org on 2016/04/18 20:25:32 UTC

[2/2] ambari git commit: AMBARI-15937. Use an exclude list when merging hive-interactive-site with hive-site; both will show common props in their config bags. (smohanty)

AMBARI-15937. Use an exclude list when merging hive-interactive-site with hive-site; both will show common props in their config bags. (smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/58e7b125
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/58e7b125
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/58e7b125

Branch: refs/heads/trunk
Commit: 58e7b1259de9f8cf28bb0034c6fa3f134a6d66b6
Parents: 2d554f8
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Mon Apr 18 11:25:18 2016 -0700
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Mon Apr 18 11:25:18 2016 -0700

----------------------------------------------------------------------
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     |   51 +-
 .../package/scripts/hive_interactive.py         |   53 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |    4 +-
 .../stacks/2.5/HIVE/test_hive_server_int.py     |  249 ++++
 .../python/stacks/2.5/configs/hsi_default.json  | 1224 ++++++++++++++++++
 5 files changed, 1549 insertions(+), 32 deletions(-)
----------------------------------------------------------------------
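
The core change is the merge-with-exclude pattern introduced in hive_interactive.py below: the effective hive2/hive-site.xml is built by layering hive-interactive-site on top of hive-site and then dropping the excluded properties. A minimal standalone sketch of that pattern (the dict literals are illustrative placeholders; the real code reads both config types from params.config):

    hive_site = {'hive.exec.dynamic.partition': 'true',      # full hive-site bag
                 'hive.enforce.bucketing': 'true'}
    hive_interactive_site = {'hive.llap.io.enabled': 'true'}  # only new/changed props

    exclude_list = ['hive.enforce.bucketing', 'hive.enforce.sorting']

    merged = {}
    merged.update(hive_site)               # start from the full hive-site bag
    merged.update(hive_interactive_site)   # interactive values win on common keys
    for prop in exclude_list:
        if prop in merged:
            del merged[prop]               # excluded props never reach hive2/hive-site.xml
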


http://git-wip-us.apache.org/repos/asf/ambari/blob/58e7b125/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 7d913e5..a2ee491 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -303,16 +303,31 @@ def hive(name=None):
     )
 
   if name != "client":
-    create_directory(params.hive_pid_dir)
-    create_directory(params.hive_log_dir)
-    create_directory(params.hive_var_lib)
+    Directory(params.hive_pid_dir,
+              create_parents = True,
+              cd_access='a',
+              owner=params.hive_user,
+              group=params.user_group,
+              mode=0755)
+    Directory(params.hive_log_dir,
+              create_parents = True,
+              cd_access='a',
+              owner=params.hive_user,
+              group=params.user_group,
+              mode=0755)
+    Directory(params.hive_var_lib,
+              create_parents = True,
+              cd_access='a',
+              owner=params.hive_user,
+              group=params.user_group,
+              mode=0755)
 
 """
 Writes configuration files required by Hive.
 """
 def fill_conf_dir(component_conf_dir):
   import params
-  
+
   Directory(component_conf_dir,
             owner=params.hive_user,
             group=params.user_group,
@@ -328,8 +343,15 @@ def fill_conf_dir(component_conf_dir):
             mode=0644)
 
 
-  crt_file(format("{component_conf_dir}/hive-default.xml.template"))
-  crt_file(format("{component_conf_dir}/hive-env.sh.template"))
+  File(format("{component_conf_dir}/hive-default.xml.template"),
+       owner=params.hive_user,
+       group=params.user_group
+  )
+
+  File(format("{component_conf_dir}/hive-env.sh.template"),
+       owner=params.hive_user,
+       group=params.user_group
+  )
 
   log4j_exec_filename = 'hive-exec-log4j.properties'
   if (params.log4j_exec_props != None):
@@ -363,23 +385,6 @@ def fill_conf_dir(component_conf_dir):
          content=StaticFile(format("{component_conf_dir}/{log4j_filename}.template"))
     )
 
-def create_directory(name):
-  import params
-
-  Directory(name,
-            create_parents = True,
-            cd_access='a',
-            owner=params.hive_user,
-            group=params.user_group,
-            mode=0755)
-
-def crt_file(name):
-  import params
-
-  File(name,
-       owner=params.hive_user,
-       group=params.user_group
-  )
 
 def jdbc_connector():
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/58e7b125/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
index 9d05d37..9cfd41f 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_interactive.py
@@ -41,7 +41,7 @@ from resource_management.core import utils
 
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
-from hive import fill_conf_dir, create_directory, jdbc_connector
+from hive import fill_conf_dir, jdbc_connector
 
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
@@ -55,6 +55,13 @@ Sets up the configs, jdbc connection and tarball copy to HDFS for Hive Server In
 def hive_interactive(name=None):
   import params
 
+  # Properties to exclude from the merged hive-site config.
+  # This approach is a compromise that avoids adding a dedicated config
+  # type for hive_server_interactive or requiring config groups on a
+  # per-component basis.
+  exclude_list = ['hive.enforce.bucketing',
+                  'hive.enforce.sorting']
+
   # Copy Tarballs in HDFS.
   resource_created = copy_to_hdfs("tez_hive2",
                params.user_group,
@@ -75,8 +82,7 @@ def hive_interactive(name=None):
             mode=0755
             )
 
-  for conf_dir in params.hive_interactive_conf_dir:
-    fill_conf_dir(conf_dir)
+  fill_conf_dir(params.hive_server_interactive_conf_dir)
 
   '''
   As hive2/hive-site.xml only contains the new + the changed props compared to hive/hive-site.xml,
@@ -85,6 +91,11 @@ def hive_interactive(name=None):
   merged_hive_interactive_site = {}
   merged_hive_interactive_site.update(params.config['configurations']['hive-site'])
   merged_hive_interactive_site.update(params.config['configurations']['hive-interactive-site'])
+  for item in exclude_list:
+    if item in merged_hive_interactive_site.keys():
+      del merged_hive_interactive_site[item]
+
+  # TODO: decide whether configuration attributes need similar handling
 
   XmlConfig("hive-site.xml",
             conf_dir=params.hive_server_interactive_conf_dir,
@@ -94,6 +105,7 @@ def hive_interactive(name=None):
             group=params.user_group,
             mode=0644)
 
+  # Merge tez-interactive with tez-site
   XmlConfig("tez-site.xml",
              conf_dir = params.tez_interactive_config_dir,
              configurations = params.config['configurations']['tez-interactive-site'],
@@ -108,6 +120,20 @@ def hive_interactive(name=None):
        content=InlineTemplate(params.hive_interactive_env_sh_template)
        )
 
+  # On some OSes this folder may not exist, so create it before pushing files into it
+  Directory(params.limits_conf_dir,
+            create_parents = True,
+            owner='root',
+            group='root'
+  )
+
+  File(os.path.join(params.limits_conf_dir, 'hive.conf'),
+       owner='root',
+       group='root',
+       mode=0644,
+       content=Template("hive.conf.j2")
+  )
+
   if not os.path.exists(params.target_hive_interactive):
     jdbc_connector()
 
@@ -120,6 +146,21 @@ def hive_interactive(name=None):
        content=Template(format('{start_hiveserver2_interactive_script}'))
        )
 
-  create_directory(params.hive_pid_dir)
-  create_directory(params.hive_log_dir)
-  create_directory(params.hive_interactive_var_lib)
\ No newline at end of file
+  Directory(params.hive_pid_dir,
+            create_parents=True,
+            cd_access='a',
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0755)
+  Directory(params.hive_log_dir,
+            create_parents=True,
+            cd_access='a',
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0755)
+  Directory(params.hive_interactive_var_lib,
+            create_parents=True,
+            cd_access='a',
+            owner=params.hive_user,
+            group=params.user_group,
+            mode=0755)

http://git-wip-us.apache.org/repos/asf/ambari/blob/58e7b125/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 03fd40c..1205202 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -111,8 +111,6 @@ hive_conf_dir = status_params.hive_conf_dir
 hive_config_dir = status_params.hive_config_dir
 hive_client_conf_dir = status_params.hive_client_conf_dir
 hive_server_conf_dir = status_params.hive_server_conf_dir
-hive_interactive_conf_dir = status_params.hive_server_interactive_conf_dir
-
 
 hcat_conf_dir = '/etc/hive-hcatalog/conf'
 config_dir = '/etc/hive-webhcat/conf'
@@ -496,7 +494,7 @@ if has_hive_interactive:
   start_hiveserver2_interactive_path = format("{tmp_dir}/start_hiveserver2_interactive_script")
   hive_interactive_env_sh_template = config['configurations']['hive-interactive-env']['content']
   # Tez for Hive interactive related
-  tez_interactive_config_dir = os.path.realpath("/etc/tez_hive2/conf")
+  tez_interactive_config_dir = "/etc/tez_hive2/conf"
   tez_interactive_user = config['configurations']['tez-env']['tez_user']
   if security_enabled:
     hive_llap_keytab_file = config['configurations']['hive-interactive-site']['hive.llap.zk.sm.keytab.file']

http://git-wip-us.apache.org/repos/asf/ambari/blob/58e7b125/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
new file mode 100644
index 0000000..ac4f6fc
--- /dev/null
+++ b/ambari-server/src/test/python/stacks/2.5/HIVE/test_hive_server_int.py
@@ -0,0 +1,249 @@
+#!/usr/bin/env python
+
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from stacks.utils.RMFTestCase import *
+
+from mock.mock import MagicMock, patch
+from resource_management.libraries import functions
+from resource_management.core.logger import Logger
+from resource_management.libraries.script.config_dictionary import UnknownConfiguration
+
+
+@patch.object(functions, "get_stack_version", new=MagicMock(return_value="2.0.0.0-1234"))
+@patch("resource_management.libraries.functions.check_thrift_port_sasl", new=MagicMock())
+@patch("resource_management.libraries.functions.get_user_call_output.get_user_call_output",
+       new=MagicMock(return_value=(0, '123', '')))
+class TestHiveServerInteractive(RMFTestCase):
+  COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
+  STACK_VERSION = "2.0.6"
+  UPGRADE_STACK_VERSION = "2.2"
+  DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
+
+  def setUp(self):
+    Logger.logger = MagicMock()
+
+  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
+  def test_configure_default(self, copy_to_hdfs_mock):
+    self.maxDiff = None
+    copy_to_hdfs_mock.return_value = False
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server_interactive.py",
+                       classname="HiveServerInteractive",
+                       command="configure",
+                       config_file=self.get_src_folder() + "/test/python/stacks/2.5/configs/hsi_default.json",
+                       stack_version=self.STACK_VERSION,
+                       target=RMFTestCase.TARGET_COMMON_SERVICES
+    )
+    self.assert_configure_default()
+    self.assertNoMoreResources()
+
+  @patch("os.path.isfile")
+  @patch("resource_management.libraries.functions.copy_tarball.copy_to_hdfs")
+  @patch("socket.socket")
+  def test_start_default(self, socket_mock, copy_to_hdfs_mock, is_file_mock):
+    self.maxDiff = None
+    copy_to_hdfs_mock.return_value = False
+    s = socket_mock.return_value
+    is_file_mock.return_value = True
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server_interactive.py",
+                       classname="HiveServerInteractive",
+                       command="start",
+                       config_file=self.get_src_folder() + "/test/python/stacks/2.5/configs/hsi_default.json",
+                       stack_version=self.STACK_VERSION,
+                       target=RMFTestCase.TARGET_COMMON_SERVICES,
+                       checked_call_mocks=[(0, "Prepared llap-slider-05Apr2016/run.sh for running LLAP", ""), (0, "OK.", "")],
+    )
+
+    self.assert_configure_default()
+
+    self.assertResourceCalled('Execute',
+                              '/home/hive/llap-slider-05Apr2016/run.sh',
+                              user='hive'
+    )
+    self.assertResourceCalled('Execute',
+                              'hive --config /etc/hive2/conf.server --service metatool -updateLocation hdfs://c6401.ambari.apache.org:8020 OK.',
+                              environment={'PATH': '/usr/bin'},
+                              user='hive'
+    )
+    self.assertResourceCalled('Execute',
+                              '/tmp/start_hiveserver2_interactive_script /var/run/hive/hive-server2-interactive.out /var/log/hive/hive-server2-interactive.err /var/run/hive/hive-interactive.pid /etc/hive2/conf.server /var/log/hive',
+                              environment={'HADOOP_HOME': '/usr',
+                                           'HIVE_BIN': 'hive2',
+                                           'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'},
+                              not_if="ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1",
+                              user='hive',
+                              path=['/bin:/usr/lib/hive/bin:/usr/bin'],
+    )
+    self.assertResourceCalled('Execute',
+                              '/usr/jdk64/jdk1.7.0_45/bin/java -cp /usr/lib/ambari-agent/DBConnectionVerification.jar:/usr/lib/hive/lib//mysql-connector-java.jar org.apache.ambari.server.DBConnectionVerification \'jdbc:mysql://c6402.ambari.apache.org/hive?createDatabaseIfNotExist=true\' hive \'!`"\'"\'"\' 1\' com.mysql.jdbc.Driver',
+                              path=['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
+                              tries=5,
+                              try_sleep=10
+    )
+    self.assertNoMoreResources()
+
+
+  def test_stop_default(self):
+    self.maxDiff = None
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hive_server_interactive.py",
+                       classname="HiveServerInteractive",
+                       command="stop",
+                       config_file=self.get_src_folder() + "/test/python/stacks/2.5/configs/hsi_default.json",
+                       stack_version=self.STACK_VERSION,
+                       target=RMFTestCase.TARGET_COMMON_SERVICES,
+                       call_mocks=[(0, "OK.", ""), (0, "OK.", "")],
+    )
+
+    self.assertResourceCalled('Execute', "ambari-sudo.sh kill 123",
+                              not_if="! (ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1)",
+    )
+    self.assertResourceCalled('Execute', "ambari-sudo.sh kill -9 123",
+                              not_if="! (ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1) || ( sleep 5 && ! (ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1) )",
+    )
+    self.assertResourceCalled('Execute',
+                              "! (ls /var/run/hive/hive-interactive.pid >/dev/null 2>&1 && ps -p 123 >/dev/null 2>&1)",
+                              tries=20,
+                              try_sleep=3,
+    )
+    self.assertResourceCalled('File', '/var/run/hive/hive-interactive.pid',
+                              action=['delete'],
+    )
+
+    self.assertNoMoreResources()
+
+
+  def assert_configure_default(self, no_tmp=False, default_fs_default='hdfs://c6401.ambari.apache.org:8020'):
+
+    self.assertResourceCalled('Directory', '/etc/hive2',
+                              mode=0755,
+    )
+    self.assertResourceCalled('Directory', '/etc/hive2/conf.server',
+                              owner='hive',
+                              group='hadoop',
+                              create_parents=True,
+    )
+
+    self.assertResourceCalled('XmlConfig', 'mapred-site.xml',
+                              group='hadoop',
+                              conf_dir='/etc/hive2/conf.server',
+                              mode=0644,
+                              configuration_attributes={u'final': {u'mapred.healthChecker.script.path': u'true',
+                                                                   u'mapreduce.jobtracker.staging.root.dir': u'true'}},
+                              owner='hive',
+                              configurations=self.getConfig()['configurations']['mapred-site'],
+    )
+    self.assertResourceCalled('File', '/etc/hive2/conf.server/hive-default.xml.template',
+                              owner='hive',
+                              group='hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive2/conf.server/hive-env.sh.template',
+                              owner='hive',
+                              group='hadoop',
+    )
+    self.assertResourceCalled('File', '/etc/hive2/conf.server/hive-exec-log4j.properties',
+                              content='log4jproperties\nline2',
+                              owner='hive',
+                              group='hadoop',
+                              mode=0644,
+    )
+    self.assertResourceCalled('File', '/etc/hive2/conf.server/hive-log4j.properties',
+                              content='log4jproperties\nline2',
+                              owner='hive',
+                              group='hadoop',
+                              mode=0644,
+    )
+    hive_site_conf = {}
+    hive_site_conf.update(self.getConfig()['configurations']['hive-site'])
+    hive_site_conf.update(self.getConfig()['configurations']['hive-interactive-site'])
+    del hive_site_conf['hive.enforce.bucketing']
+    del hive_site_conf['hive.enforce.sorting']
+    self.assertResourceCalled('XmlConfig', 'hive-site.xml',
+                              group='hadoop',
+                              conf_dir='/etc/hive2/conf.server',
+                              mode=0644,
+                              configuration_attributes={u'final': {u'hive.optimize.bucketmapjoin.sortedmerge': u'true',
+                                                                   u'javax.jdo.option.ConnectionDriverName': u'true',
+                                                                   u'javax.jdo.option.ConnectionPassword': u'true'}},
+                              owner='hive',
+                              configurations=hive_site_conf,
+    )
+    self.assertResourceCalled('XmlConfig', 'tez-site.xml',
+                              group='hadoop',
+                              conf_dir='/etc/tez_hive2/conf',
+                              mode=0664,
+                              configuration_attributes=UnknownConfigurationMock(),
+                              owner='tez',
+                              configurations=self.getConfig()['configurations']['tez-interactive-site'],
+                              )
+    self.assertResourceCalled('File', '/etc/hive2/conf.server/hive-env.sh',
+                              content=InlineTemplate(self.getConfig()['configurations']['hive-interactive-env']['content']),
+                              owner='hive',
+                              group='hadoop',
+    )
+    self.assertResourceCalled('Directory', '/etc/security/limits.d',
+                              owner='root',
+                              group='root',
+                              create_parents=True,
+    )
+    self.assertResourceCalled('File', '/etc/security/limits.d/hive.conf',
+                              content=Template('hive.conf.j2'),
+                              owner='root',
+                              group='root',
+                              mode=0644,
+    )
+    self.assertResourceCalled('Execute', ('cp',
+                                          '--remove-destination',
+                                          '/usr/share/java/mysql-connector-java.jar',
+                                          '/usr/lib/hive/lib//mysql-connector-java.jar'),
+                              path=['/bin', '/usr/bin/'],
+                              sudo=True,
+    )
+    self.assertResourceCalled('File', '/usr/lib/hive/lib//mysql-connector-java.jar',
+                              mode=0644,
+    )
+    self.assertResourceCalled('File', '/usr/lib/ambari-agent/DBConnectionVerification.jar',
+                              content=DownloadSource('http://c6401.ambari.apache.org:8080/resources'
+                                                     '/DBConnectionVerification.jar'),
+                              mode=0644,
+    )
+    self.assertResourceCalled('File', '/tmp/start_hiveserver2_interactive_script',
+                              content=Template('startHiveserver2Interactive.sh.j2'),
+                              mode=0755,
+    )
+    self.assertResourceCalled('Directory', '/var/run/hive',
+                              owner='hive',
+                              mode=0755,
+                              group='hadoop',
+                              create_parents=True,
+                              cd_access='a',
+    )
+    self.assertResourceCalled('Directory', '/var/log/hive',
+                              owner='hive',
+                              mode=0755,
+                              group='hadoop',
+                              create_parents=True,
+                              cd_access='a',
+    )
+    self.assertResourceCalled('Directory', '/var/lib/hive2',
+                              owner='hive',
+                              mode=0755,
+                              group='hadoop',
+                              create_parents=True,
+                              cd_access='a',
+    )
\ No newline at end of file