You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@bigtop.apache.org by kw...@apache.org on 2017/02/04 03:04:30 UTC
bigtop git commit: BIGTOP-2490: Spark in HA when Zookeeper is available (closes #139)
Repository: bigtop
Updated Branches:
refs/heads/master 8e4d9734e -> f627c7868
BIGTOP-2490: Spark in HA when Zookeeper is available (closes #139)
Signed-off-by: Kevin W Monroe <ke...@canonical.com>
Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/f627c786
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/f627c786
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/f627c786
Branch: refs/heads/master
Commit: f627c7868cb1123f6595380a7325f538ebdb8913
Parents: 8e4d973
Author: Konstantinos Tsakalozos <ko...@canonical.com>
Authored: Tue Jul 5 20:25:27 2016 +0300
Committer: Kevin W Monroe <ke...@canonical.com>
Committed: Fri Feb 3 21:03:31 2017 -0600
----------------------------------------------------------------------
bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml | 3 +++
bigtop-deploy/puppet/modules/spark/manifests/init.pp | 9 ++++++++-
.../puppet/modules/spark/templates/spark-defaults.conf | 2 ++
bigtop-deploy/puppet/modules/spark/templates/spark-env.sh | 2 ++
bigtop-packages/src/common/spark/spark-worker.svc | 2 +-
5 files changed, 16 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/bigtop/blob/f627c786/bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml b/bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml
index 7779d21..00cb044 100644
--- a/bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml
+++ b/bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml
@@ -146,7 +146,10 @@ hadoop_oozie::server::kerberos_realm: "%{hiera('kerberos::site::realm')}"
hcatalog::server::kerberos_realm: "%{hiera('kerberos::site::realm')}"
hcatalog::webhcat::server::kerberos_realm: "%{hiera('kerberos::site::realm')}"
+# spark
spark::common::master_host: "%{hiera('bigtop::hadoop_head_node')}"
+# to enable spark HA, ensure zookeeper is available and uncomment the line below
+#spark::common::zookeeper_connection_string: "%{hiera('hadoop::zk')}"
alluxio::common::master_host: "%{hiera('bigtop::hadoop_head_node')}"
http://git-wip-us.apache.org/repos/asf/bigtop/blob/f627c786/bigtop-deploy/puppet/modules/spark/manifests/init.pp
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/modules/spark/manifests/init.pp b/bigtop-deploy/puppet/modules/spark/manifests/init.pp
index 7bc4a5b..1b2ff6b 100644
--- a/bigtop-deploy/puppet/modules/spark/manifests/init.pp
+++ b/bigtop-deploy/puppet/modules/spark/manifests/init.pp
@@ -109,7 +109,7 @@ class spark {
],
hasrestart => true,
hasstatus => true,
- }
+ }
}
class yarn {
@@ -137,6 +137,7 @@ class spark {
class common(
$master_url = 'yarn',
$master_host = $fqdn,
+ $zookeeper_connection_string = undef,
$master_port = 7077,
$worker_port = 7078,
$master_ui_port = 8080,
@@ -158,6 +159,12 @@ class spark {
ensure => latest,
}
+ if $zookeeper_connection_string == undef {
+ $spark_daemon_java_opts = "\"-Dspark.deploy.recoveryMode=NONE\""
+ } else {
+ $spark_daemon_java_opts = "\"-Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=${zookeeper_connection_string}\""
+ }
+
file { '/etc/spark/conf/spark-env.sh':
content => template('spark/spark-env.sh'),
require => Package['spark-core'],
http://git-wip-us.apache.org/repos/asf/bigtop/blob/f627c786/bigtop-deploy/puppet/modules/spark/templates/spark-defaults.conf
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/modules/spark/templates/spark-defaults.conf b/bigtop-deploy/puppet/modules/spark/templates/spark-defaults.conf
index 30946b3..cd18235 100644
--- a/bigtop-deploy/puppet/modules/spark/templates/spark-defaults.conf
+++ b/bigtop-deploy/puppet/modules/spark/templates/spark-defaults.conf
@@ -17,6 +17,8 @@ spark.master <%= @master_url %>
spark.eventLog.enabled true
spark.eventLog.dir <%= @event_log_dir %>
spark.history.fs.logDirectory <%= @history_log_dir %>
+<% if @master_url =~ /^yarn/ -%>
spark.yarn.historyServer.address <%= @master_host %>:<%= @history_ui_port %>
+<% end -%>
spark.history.ui.port <%= @history_ui_port %>
spark.shuffle.service.enabled <%= @use_yarn_shuffle_service %>
http://git-wip-us.apache.org/repos/asf/bigtop/blob/f627c786/bigtop-deploy/puppet/modules/spark/templates/spark-env.sh
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/modules/spark/templates/spark-env.sh b/bigtop-deploy/puppet/modules/spark/templates/spark-env.sh
index eb351c7..0aabc65 100755
--- a/bigtop-deploy/puppet/modules/spark/templates/spark-env.sh
+++ b/bigtop-deploy/puppet/modules/spark/templates/spark-env.sh
@@ -16,6 +16,7 @@
export SPARK_HOME=${SPARK_HOME:-/usr/lib/spark}
export SPARK_LOG_DIR=${SPARK_LOG_DIR:-/var/log/spark}
+export SPARK_DAEMON_JAVA_OPTS=<%= @spark_daemon_java_opts %>
export HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop/conf}
export HIVE_CONF_DIR=${HIVE_CONF_DIR:-/etc/hive/conf}
@@ -23,6 +24,7 @@ export HIVE_CONF_DIR=${HIVE_CONF_DIR:-/etc/hive/conf}
export STANDALONE_SPARK_MASTER_HOST=<%= @master_host %>
export SPARK_MASTER_PORT=<%= @master_port %>
export SPARK_MASTER_IP=$STANDALONE_SPARK_MASTER_HOST
+export SPARK_MASTER_URL=<%= @master_url %>
export SPARK_MASTER_WEBUI_PORT=<%= @master_ui_port %>
export SPARK_WORKER_DIR=${SPARK_WORKER_DIR:-/var/run/spark/work}
http://git-wip-us.apache.org/repos/asf/bigtop/blob/f627c786/bigtop-packages/src/common/spark/spark-worker.svc
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/spark/spark-worker.svc b/bigtop-packages/src/common/spark/spark-worker.svc
index 7c12374..445d344 100644
--- a/bigtop-packages/src/common/spark/spark-worker.svc
+++ b/bigtop-packages/src/common/spark/spark-worker.svc
@@ -44,7 +44,7 @@ start() {
fi
su -s /bin/bash $SVC_USER -c "nohup nice -n 0 \
- ${EXEC_PATH} org.apache.spark.deploy.worker.Worker spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT $DAEMON_FLAGS \
+ ${EXEC_PATH} org.apache.spark.deploy.worker.Worker $SPARK_MASTER_URL $DAEMON_FLAGS \
> $LOG_FILE 2>&1 & "'echo $!' > "$PIDFILE"
sleep 3