Posted to commits@bigtop.apache.org by kw...@apache.org on 2017/05/11 16:42:17 UTC

bigtop git commit: BIGTOP-2765: fix roles logic for spark/zeppelin charms

Repository: bigtop
Updated Branches:
  refs/heads/master 42146ecbf -> 20bfd7d0a


BIGTOP-2765: fix roles logic for spark/zeppelin charms

Closes #206


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/20bfd7d0
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/20bfd7d0
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/20bfd7d0

Branch: refs/heads/master
Commit: 20bfd7d0a60bf9c4332484849fa24c0a3f4b5c24
Parents: 42146ec
Author: Kevin W Monroe <ke...@canonical.com>
Authored: Wed May 10 17:26:27 2017 +0000
Committer: Kevin W Monroe <ke...@canonical.com>
Committed: Thu May 11 11:41:44 2017 -0500

----------------------------------------------------------------------
 .../lib/charms/layer/bigtop_spark.py            | 40 +++++++++++---------
 .../lib/charms/layer/bigtop_zeppelin.py         | 16 ++++----
 2 files changed, 29 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/20bfd7d0/bigtop-packages/src/charm/spark/layer-spark/lib/charms/layer/bigtop_spark.py
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/charm/spark/layer-spark/lib/charms/layer/bigtop_spark.py b/bigtop-packages/src/charm/spark/layer-spark/lib/charms/layer/bigtop_spark.py
index 1be1072..fa8b5e7 100755
--- a/bigtop-packages/src/charm/spark/layer-spark/lib/charms/layer/bigtop_spark.py
+++ b/bigtop-packages/src/charm/spark/layer-spark/lib/charms/layer/bigtop_spark.py
@@ -18,7 +18,6 @@ from jujubigdata import utils
 from path import Path
 
 from charms.layer.apache_bigtop_base import Bigtop
-from charms.reactive import is_state
 from charms import layer
 from charmhelpers.core import hookenv, host, unitdata
 from charmhelpers.fetch.archiveurl import ArchiveUrlFetchHandler
@@ -52,14 +51,6 @@ class Spark(object):
             master = 'yarn-client'
         return master
 
-    def get_roles(self):
-        roles = ['spark-worker', 'spark-client']
-        zk_units = unitdata.kv().get('zookeeper.units', [])
-        if is_state('leadership.is_leader') or zk_units:
-            roles.append('spark-master')
-            roles.append('spark-history-server')
-        return roles
-
     def install_benchmark(self):
         """
         Install and configure SparkBench.
@@ -182,23 +173,27 @@ class Spark(object):
 
         :param dict available_hosts: Hosts that Spark should know about.
         """
-        unitdata.kv().set('zookeeper.units', zk_units)
-        unitdata.kv().set('sparkpeer.units', peers)
-        unitdata.kv().flush(True)
-
+        # Bootstrap spark
         if not unitdata.kv().get('spark.bootstrapped', False):
             self.setup()
             unitdata.kv().set('spark.bootstrapped', True)
 
+        # Set KV based on connected applications
+        unitdata.kv().set('zookeeper.units', zk_units)
+        unitdata.kv().set('sparkpeer.units', peers)
+        unitdata.kv().flush(True)
+
+        # Get our config ready
+        dc = self.dist_config
+        events_log_dir = 'file://{}'.format(dc.path('spark_events'))
         mode = hookenv.config()['spark_execution_mode']
         master_ip = utils.resolve_private_address(available_hosts['spark-master'])
         master_url = self.get_master_url(master_ip)
+
+        # Setup hosts dict
         hosts = {
             'spark': master_ip,
         }
-
-        dc = self.dist_config
-        events_log_dir = 'file://{}'.format(dc.path('spark_events'))
         if 'namenode' in available_hosts:
             hosts['namenode'] = available_hosts['namenode']
             events_log_dir = self.setup_hdfs_logs()
@@ -206,14 +201,22 @@ class Spark(object):
         if 'resourcemanager' in available_hosts:
             hosts['resourcemanager'] = available_hosts['resourcemanager']
 
-        roles = self.get_roles()
+        # Setup roles dict. We always include the history server and client.
+        # Determine other roles based on our execution mode.
+        roles = ['spark-history-server', 'spark-client']
+        if mode == 'standalone':
+            roles.append('spark-master')
+            roles.append('spark-worker')
+        elif mode.startswith('yarn'):
+            roles.append('spark-on-yarn')
+            roles.append('spark-yarn-slave')
 
+        # Setup overrides dict
         override = {
             'spark::common::master_url': master_url,
             'spark::common::event_log_dir': events_log_dir,
             'spark::common::history_log_dir': events_log_dir,
         }
-
         if zk_units:
             zks = []
             for unit in zk_units:
@@ -225,6 +228,7 @@ class Spark(object):
         else:
             override['spark::common::zookeeper_connection_string'] = None
 
+        # Create our site.yaml and trigger puppet
         bigtop = Bigtop()
         bigtop.render_site_yaml(hosts, roles, override)
         bigtop.trigger_puppet()
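
For readers skimming the patch, the new role selection can be read in
isolation as a small function: roles now follow the charm's configured
execution mode rather than leadership or zookeeper state. The snippet below
is a hypothetical sketch, not code from the commit; the function name and
its 'mode' argument stand in for the charm's spark_execution_mode config
value, while the role names are the ones used in the hunk above.

    def select_spark_roles(mode):
        """Return Bigtop puppet roles for a given spark_execution_mode."""
        # The history server and client are always configured.
        roles = ['spark-history-server', 'spark-client']
        if mode == 'standalone':
            # Standalone mode runs Spark's own master and worker daemons.
            roles.extend(['spark-master', 'spark-worker'])
        elif mode.startswith('yarn'):
            # yarn-client / yarn-cluster delegate scheduling to YARN.
            roles.extend(['spark-on-yarn', 'spark-yarn-slave'])
        return roles

    # Example:
    #   select_spark_roles('standalone')
    #   -> ['spark-history-server', 'spark-client', 'spark-master', 'spark-worker']
    #   select_spark_roles('yarn-client')
    #   -> ['spark-history-server', 'spark-client', 'spark-on-yarn', 'spark-yarn-slave']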

http://git-wip-us.apache.org/repos/asf/bigtop/blob/20bfd7d0/bigtop-packages/src/charm/zeppelin/layer-zeppelin/lib/charms/layer/bigtop_zeppelin.py
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/charm/zeppelin/layer-zeppelin/lib/charms/layer/bigtop_zeppelin.py b/bigtop-packages/src/charm/zeppelin/layer-zeppelin/lib/charms/layer/bigtop_zeppelin.py
index bb7cf08..0bb545a 100644
--- a/bigtop-packages/src/charm/zeppelin/layer-zeppelin/lib/charms/layer/bigtop_zeppelin.py
+++ b/bigtop-packages/src/charm/zeppelin/layer-zeppelin/lib/charms/layer/bigtop_zeppelin.py
@@ -92,15 +92,6 @@ class Zeppelin(object):
         self.wait_for_api(30)
         ##########
 
-        ##########
-        # BUG: BIGTOP-2154
-        # The zep deb depends on spark-core and spark-python. However, because
-        # of the unholy requirement to have hive tightly coupled to spark,
-        # we need to ensure spark-datanucleus is installed. Do this after the
-        # initial install so the bigtop repo is available to us.
-        utils.run_as('root', 'apt-get', 'install', '-qy', 'spark-datanucleus')
-        ##########
-
     def trigger_bigtop(self):
         '''
         Trigger the Bigtop puppet recipe that handles the Zeppelin service.
@@ -108,8 +99,15 @@ class Zeppelin(object):
         bigtop = Bigtop()
         overrides = unitdata.kv().getrange('zeppelin.bigtop.overrides.',
                                            strip=True)
+
+        # The zep deb depends on spark-core, spark-python, and unfortunately,
+        # most of hadoop. Include appropriate roles here to ensure these
+        # packages are configured in the same way as our other Bigtop
+        # software deployed with puppet.
         bigtop.render_site_yaml(
             roles=[
+                'spark-client',
+                'spark-yarn-slave',
                 'zeppelin-server',
             ],
             overrides=overrides,
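
On the Zeppelin side, the manual spark-datanucleus apt-get workaround for
BIGTOP-2154 is dropped; declaring the spark roles in site.yaml lets the same
puppet run configure the Spark packages the zeppelin deb pulls in. As a
rough, hypothetical sketch (only the roles list and the
getrange/render_site_yaml calls are taken from the diff; the trailing
trigger_puppet call is assumed, not shown in the hunk), the patched method
reads roughly as:

    def trigger_bigtop(self):
        '''
        Trigger the Bigtop puppet recipe that handles the Zeppelin service.
        '''
        bigtop = Bigtop()
        # Overrides are stored in the unit kv under keys prefixed with
        # 'zeppelin.bigtop.overrides.'; strip=True drops that prefix.
        overrides = unitdata.kv().getrange('zeppelin.bigtop.overrides.',
                                           strip=True)
        # Declaring the spark roles here configures the Spark packages the
        # zeppelin deb depends on, instead of a one-off apt-get install.
        bigtop.render_site_yaml(
            roles=[
                'spark-client',
                'spark-yarn-slave',
                'zeppelin-server',
            ],
            overrides=overrides,
        )
        bigtop.trigger_puppet()  # assumed; not part of the hunk above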