Posted to commits@ambari.apache.org by sr...@apache.org on 2015/04/29 08:34:54 UTC

ambari git commit: AMBARI-10801. Hive Review: Multiple /recommendation API fixes (mpapirkovskyy via srimanth)

Repository: ambari
Updated Branches:
  refs/heads/trunk 47907b961 -> 98098a891


AMBARI-10801. Hive Review: Multiple /recommendation API fixes (mpapirkovskyy via srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/98098a89
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/98098a89
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/98098a89

Branch: refs/heads/trunk
Commit: 98098a89115f6e144e81dbd82a5c443bf7706c7d
Parents: 47907b9
Author: Srimanth Gunturi <sg...@hortonworks.com>
Authored: Tue Apr 28 22:40:54 2015 -0700
Committer: Srimanth Gunturi <sg...@hortonworks.com>
Committed: Tue Apr 28 23:16:35 2015 -0700

----------------------------------------------------------------------
 .../services/HIVE/configuration/hive-site.xml   |  2 +-
 .../stacks/HDP/2.2/services/stack_advisor.py    | 46 +++++++++++++++-----
 .../stacks/2.2/common/test_stack_advisor.py     | 44 ++++++++++++++++++-
 3 files changed, 79 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/98098a89/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
index 4525345..38bef72 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/HIVE/configuration/hive-site.xml
@@ -179,7 +179,7 @@ limitations under the License.
     <value-attributes>
       <type>int</type>
       <minimum>64</minimum>
-      <maximum>4398046511104</maximum>
+      <maximum>4294967296</maximum>
       <unit>B</unit>
       <step-increment></step-increment>
     </value-attributes>
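(Not part of the commit: a quick sanity check of the two <maximum> values in the hunk above. The old ceiling is 2**42 bytes, i.e. 4 TiB; the new one is 2**32 bytes, i.e. 4 GiB. A standalone check, assuming nothing beyond the two literals shown:)

  old_max = 4398046511104
  new_max = 4294967296
  assert old_max == 2**42 and new_max == 2**32
  print(old_max // 2**40, "TiB ->", new_max // 2**30, "GiB")  # 4 TiB -> 4 GiB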

http://git-wip-us.apache.org/repos/asf/ambari/blob/98098a89/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
index d5e1534..7ce98b9 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/services/stack_advisor.py
@@ -232,9 +232,6 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
     putHiveSiteProperty("hive.vectorized.execution.enabled", "true")
     putHiveSiteProperty("hive.vectorized.execution.reduce.enabled", "false")
 
-    # Memory
-    putHiveSiteProperty("hive.exec.reducers.bytes.per.reducer", "67108864")
-
     # Transactions
     putHiveEnvProperty("hive_txn_acid", "Off")
     if str(configurations["hive-env"]["properties"]["hive_txn_acid"]).lower() == "on":
@@ -296,22 +293,24 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
       "yarn.scheduler.minimum-allocation-mb" in configurations["yarn-site"]["properties"]:
       container_size = configurations["yarn-site"]["properties"]["yarn.scheduler.minimum-allocation-mb"]
     putHiveSiteProperty("hive.tez.container.size", container_size)
-    putHiveSiteProperty("hive.auto.convert.join.noconditionaltask.size", str(int(int(container_size)/3)*1024*1024))
     putHiveSiteProperty("hive.prewarm.enabled", "false")
     putHiveSiteProperty("hive.prewarm.numcontainers", "3")
     putHiveSiteProperty("hive.tez.auto.reducer.parallelism", "true")
     putHiveSiteProperty("hive.tez.dynamic.partition.pruning", "true")
 
+    # Memory
+    putHiveSiteProperty("hive.auto.convert.join.noconditionaltask.size", int(int(container_size)*1024*1024/3))
+    putHiveSiteProperty("hive.exec.reducers.bytes.per.reducer", "67108864")
+
     # CBO
     putHiveEnvProperty("cost_based_optimizer", "On")
     if str(configurations["hive-env"]["properties"]["cost_based_optimizer"]).lower() == "on":
       putHiveSiteProperty("hive.cbo.enable", "true")
-      putHiveSiteProperty("hive.stats.fetch.partition.stats", "true")
-      putHiveSiteProperty("hive.stats.fetch.column.stats", "true")
     else:
       putHiveSiteProperty("hive.cbo.enable", "false")
-      putHiveSiteProperty("hive.stats.fetch.partition.stats", "false")
-      putHiveSiteProperty("hive.stats.fetch.column.stats", "false")
+    hive_cbo_enable = configurations["hive-site"]["properties"]["hive.cbo.enable"]
+    putHiveSiteProperty("hive.stats.fetch.partition.stats", hive_cbo_enable)
+    putHiveSiteProperty("hive.stats.fetch.column.stats", hive_cbo_enable)
     putHiveSiteProperty("hive.compute.query.using.stats ", "true")
 
     # Interactive Query
@@ -340,11 +339,22 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
     else:
       putHiveSiteProperty("hive.security.authorization.enabled", "true")
 
-    if str(configurations["hive-env"]["properties"]["hive_security_authorization"]).lower() == "sqlstdauth":
+    try:
       auth_manager_value = str(configurations["hive-env"]["properties"]["hive.security.metastore.authorization.manager"])
-      sqlstdauth_class = "org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly"
+    except KeyError:
+      auth_manager_value = ''
+      pass
+    sqlstdauth_class = "org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly"
+
+    if str(configurations["hive-env"]["properties"]["hive_security_authorization"]).lower() == "sqlstdauth":
       if sqlstdauth_class not in auth_manager_value:
         putHiveSiteProperty("hive.security.metastore.authorization.manager", auth_manager_value + "," + sqlstdauth_class)
+    elif auth_manager_value != '':
+      #remove item from csv
+      auth_manager_values = auth_manager_value.split(",")
+      auth_manager_values = [x for x in auth_manager_values if x != sqlstdauth_class]
+      putHiveSiteProperty("hive.security.metastore.authorization.manager", ",".join(auth_manager_values))
+      pass
 
     putHiveServerProperty("hive.server2.enable.doAs", "true")
     putHiveSiteProperty("hive.server2.use.SSL", "false")
@@ -445,7 +455,9 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
       "HDFS": {"hdfs-site": self.validateHDFSConfigurations,
                "hadoop-env": self.validateHDFSConfigurationsEnv},
       "YARN": {"yarn-env": self.validateYARNEnvConfigurations},
-      "HIVE": {"hiveserver2-site": self.validateHiveServer2Configurations, "hive-site": self.validateHiveConfigurations},
+      "HIVE": {"hiveserver2-site": self.validateHiveServer2Configurations,
+               "hive-site": self.validateHiveConfigurations,
+               "hive-env": self.validateHiveConfigurationsEnv},
       "HBASE": {"hbase-site": self.validateHBASEConfigurations,
                 "hbase-env": self.validateHBASEEnvConfigurations},
       "MAPREDUCE2": {"mapred-site": self.validateMapReduce2Configurations},
@@ -660,6 +672,18 @@ class HDP22StackAdvisor(HDP21StackAdvisor):
                                   " {0} needs to be set to {1}".format(prop_name,prop_val))})
     return self.toConfigurationValidationProblems(validationItems, "hiveserver2-site")
 
+  def validateHiveConfigurationsEnv(self, properties, recommendedDefaults, configurations, services, hosts):
+    validationItems = []
+    hive_env = properties
+    hive_site = getSiteProperties(configurations, "hive-site")
+    if str(hive_env["hive_security_authorization"]).lower() == "none" \
+      and str(hive_site["hive.security.authorization.enabled"]).lower() == "true":
+      authorization_item = self.getErrorItem("hive_security_authorization should not be None "
+                                             "if hive.security.authorization.enabled is set")
+      validationItems.append({"config-name": "hive_security_authorization", "item": authorization_item})
+
+    return self.toConfigurationValidationProblems(validationItems, "hive-env")
+
   def validateHiveConfigurations(self, properties, recommendedDefaults, configurations, services, hosts):
     super(HDP22StackAdvisor, self).validateHiveConfigurations(properties, recommendedDefaults, configurations, services, hosts)
     hive_site = properties
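(Not part of the commit: the reordered hive.auto.convert.join.noconditionaltask.size formula in the hunk above divides by 3 only after converting the container size from MB to bytes, so it no longer truncates to a whole number of MB first, and it now emits an int rather than a string. A minimal sketch of the difference, assuming a 256 MB minimum container, the value the updated test expectation below implies:)

  container_size = "256"                                  # yarn.scheduler.minimum-allocation-mb
  old_value = int(int(container_size) / 3) * 1024 * 1024  # old: truncate to whole MB, then scale
  new_value = int(int(container_size) * 1024 * 1024 / 3)  # new: scale to bytes, then divide
  print(old_value, new_value)                             # 89128960 89478485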

http://git-wip-us.apache.org/repos/asf/ambari/blob/98098a89/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
index e0a5006..53439bf 100644
--- a/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
+++ b/ambari-server/src/test/python/stacks/2.2/common/test_stack_advisor.py
@@ -832,6 +832,7 @@ class TestHDP22StackAdvisor(TestCase):
     self.assertEquals(configurations, expected)
 
   def test_recommendHiveConfigurationAttributes(self):
+    self.maxDiff = None
     configurations = {
       "yarn-site": {
         "properties": {
@@ -874,7 +875,7 @@ class TestHDP22StackAdvisor(TestCase):
       },
       'hive-site': {
         'properties': {
-          'hive.auto.convert.join.noconditionaltask.size': '89128960',
+          'hive.auto.convert.join.noconditionaltask.size': '89478485',
           'hive.cbo.enable': 'true',
           'hive.compactor.initiator.on': 'false',
           'hive.compactor.worker.threads': '0',
@@ -1042,6 +1043,22 @@ class TestHDP22StackAdvisor(TestCase):
     self.stackAdvisor.recommendHIVEConfigurations(configurations, clusterData, services, hosts)
     self.assertEquals(configurations, expected)
 
+    #test recommendations
+    configurations = expected
+    configurations["hive-site"]["properties"]["hive.cbo.enable"] = "false"
+    configurations["hive-env"]["properties"]["hive_security_authorization"] = "sqlstdauth"
+    services["configurations"] = configurations
+    services["changed-configurations"] = [{"type": "hive-site", "key": "hive.cbo.enable"},
+                                          {"type": "hive-env", "key": "hive_security_authorization"}]
+    expected["hive-site"]["properties"]["hive.stats.fetch.partition.stats"]="false"
+    expected["hive-site"]["properties"]["hive.stats.fetch.column.stats"]="false"
+    expected["hive-site"]["properties"]["hive.security.metastore.authorization.manager"]=\
+      ",org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly"
+
+    self.stackAdvisor.recommendHIVEConfigurations(configurations, clusterData, services, hosts)
+    self.assertEquals(configurations, expected)
+
+
   def test_recommendMapredConfigurationAttributes(self):
     configurations = {
       "mapred-site": {
@@ -1878,6 +1895,31 @@ class TestHDP22StackAdvisor(TestCase):
     res = self.stackAdvisor.validateMapReduce2Configurations(properties, recommendedDefaults, {}, '', '')
     self.assertEquals(res, res_expected)
 
+  def test_validateHiveConfigurationsEnv(self):
+    properties = {"hive_security_authorization": "None"}
+    configurations = {"hive-site": {
+                        "properties": {"hive.security.authorization.enabled": "true"}
+                      },
+                      "hive-env": {
+                        "properties": {"hive_security_authorization": "None"}
+                      }
+    }
+
+    res_expected = [
+      {
+        "config-type": "hive-env",
+        "message": "hive_security_authorization should not be None if hive.security.authorization.enabled is set",
+        'type': 'configuration',
+        "config-name": "hive_security_authorization",
+        "level": "ERROR"
+      }
+    ]
+
+    res = self.stackAdvisor.validateHiveConfigurationsEnv(properties, {}, configurations, {}, {})
+    self.assertEquals(res, res_expected)
+
+    pass
+
   def test_recommendYarnCGroupConfigurations(self):
     servicesList = ["YARN"]
     configurations = {}
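
(Not part of the commit: the new hive-env validator flags the case where hive-site has authorization enabled while hive_security_authorization is still "None", which is what test_validateHiveConfigurationsEnv above exercises. A minimal standalone sketch of that rule outside the stack advisor framework, with a hypothetical helper name:)

  def check_hive_security_authorization(hive_env, hive_site):
      # Mirrors the condition in validateHiveConfigurationsEnv.
      if (str(hive_env.get("hive_security_authorization")).lower() == "none"
          and str(hive_site.get("hive.security.authorization.enabled")).lower() == "true"):
          return ("hive_security_authorization should not be None "
                  "if hive.security.authorization.enabled is set")
      return None

  print(check_hive_security_authorization(
      {"hive_security_authorization": "None"},
      {"hive.security.authorization.enabled": "true"}))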