You are viewing a plain text version of this content; the canonical HTML version is available in the mailing-list archive.
Posted to commits@ambari.apache.org by sm...@apache.org on 2015/11/26 10:04:32 UTC
[4/4] ambari git commit: AMBARI-13980. Make sure Hive is
selected/deployed when installing Spark Thrift server (Saisai Shao via
smohanty)
AMBARI-13980. Make sure Hive is selected/deployed when installing Spark Thrift server (Saisai Shao via smohanty)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0585b5af
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0585b5af
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0585b5af
Branch: refs/heads/branch-2.1
Commit: 0585b5af5a9fcef71648e2af8c474d21053c77c2
Parents: 3d57232
Author: Sumit Mohanty <sm...@hortonworks.com>
Authored: Thu Nov 26 01:04:17 2015 -0800
Committer: Sumit Mohanty <sm...@hortonworks.com>
Committed: Thu Nov 26 01:04:17 2015 -0800
----------------------------------------------------------------------
.../SPARK/1.4.1.2.3/metainfo.xml | 11 +-
.../stacks/HDP/2.3/services/stack_advisor.py | 22 +-
.../2.3/common/services-sparkts-hive.json | 10043 +++++++++++++++++
.../stacks/2.3/common/services-sparkts.json | 5860 ++++++++++
.../python/stacks/2.3/common/sparkts-host.json | 220 +
.../stacks/2.3/common/test_stack_advisor.py | 101 +-
6 files changed, 16220 insertions(+), 37 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/0585b5af/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml b/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml
index 0923dda..8dcb39d 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.4.1.2.3/metainfo.xml
@@ -52,8 +52,15 @@
<scope>host</scope>
<auto-deploy>
<enabled>true</enabled>
- </auto-deploy>
- </dependency>
+ </auto-deploy>
+ </dependency>
+ <dependency>
+ <name>HIVE/HIVE_METASTORE</name>
+ <scope>cluster</scope>
+ <auto-deploy>
+ <enabled>true</enabled>
+ </auto-deploy>
+ </dependency>
</dependencies>
<commandScript>
<script>scripts/spark_thrift_server.py</script>
http://git-wip-us.apache.org/repos/asf/ambari/blob/0585b5af/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
index d96aa57..9fb9e24 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3/services/stack_advisor.py
@@ -39,7 +39,10 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
def getComponentLayoutValidations(self, services, hosts):
parentItems = super(HDP23StackAdvisor, self).getComponentLayoutValidations(services, hosts)
- if not "HAWQ" in [service["StackServices"]["service_name"] for service in services["services"]]:
+ hiveExists = "HIVE" in [service["StackServices"]["service_name"] for service in services["services"]]
+ sparkExists = "SPARK" in [service["StackServices"]["service_name"] for service in services["services"]]
+
+ if not "HAWQ" in [service["StackServices"]["service_name"] for service in services["services"]] and not sparkExists:
return parentItems
childItems = []
@@ -73,7 +76,20 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
"If you leave them collocated, make sure to set HAWQ Master Port property " \
"to a value different from the port number used by Ambari Server database."
childItems.append( { "type": 'host-component', "level": 'WARN', "message": message, "component-name": 'HAWQSTANDBY', "host": host } )
-
+
+ if "SPARK_THRIFTSERVER" in [service["StackServices"]["service_name"] for service in services["services"]]:
+ if not "HIVE_SERVER" in [service["StackServices"]["service_name"] for service in services["services"]]:
+ message = "SPARK_THRIFTSERVER requires HIVE services to be selected."
+ childItems.append( {"type": 'host-component', "level": 'ERROR', "message": message, "component-name": 'SPARK_THRIFTSERVER'} )
+
+ hmsHosts = [component["StackServiceComponents"]["hostnames"] for component in componentsList if component["StackServiceComponents"]["component_name"] == "HIVE_METASTORE"][0] if hiveExists else []
+ sparkTsHosts = [component["StackServiceComponents"]["hostnames"] for component in componentsList if component["StackServiceComponents"]["component_name"] == "SPARK_THRIFTSERVER"][0] if sparkExists else []
+
+ # if Spark Thrift Server is deployed but no Hive Server is deployed
+ if len(sparkTsHosts) > 0 and len(hmsHosts) == 0:
+ message = "SPARK_THRIFTSERVER requires HIVE_METASTORE to be selected/deployed."
+ childItems.append( { "type": 'host-component', "level": 'ERROR', "message": message, "component-name": 'SPARK_THRIFTSERVER' } )
+
parentItems.extend(childItems)
return parentItems
@@ -567,7 +583,7 @@ class HDP23StackAdvisor(HDP22StackAdvisor):
"HIVE": {"hiveserver2-site": self.validateHiveServer2Configurations,
"hive-site": self.validateHiveConfigurations},
"HBASE": {"hbase-site": self.validateHBASEConfigurations},
- "KAKFA": {"kafka-broker": self.validateKAFKAConfigurations}
+ "KAKFA": {"kafka-broker": self.validateKAFKAConfigurations}
}
self.mergeValidators(parentValidators, childValidators)
return parentValidators