Posted to commits@dlab.apache.org by om...@apache.org on 2019/04/09 13:11:16 UTC
[incubator-dlab] 01/01: [DLAB-596]: Fixed issue with header structure after Dataengine creation with custom Spark configuration
This is an automated email from the ASF dual-hosted git repository.
omartushevskyi pushed a commit to branch DLAB-596
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git
commit ae2926a18dc97f64c5f04b1acb2abc41ee0d9001
Author: Oleh Martushevskyi <Ol...@epam.com>
AuthorDate: Tue Apr 9 16:11:05 2019 +0300
[DLAB-596]: Fixed issue with header structure after Dataengine creation with custom Spark configuration
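Before this change, the script truncated spark-defaults.conf (echo "" > ...) before appending the merged properties, which discarded the commented DLab header lines at the top of the file. The fix captures those header lines from the notebook template with grep "^#", writes them back first, and skips comment lines while merging the user-supplied properties so the header is never re-processed as a property.

A minimal sketch of the merge logic this commit arrives at, using local file I/O in place of the Fabric sudo() calls the script actually runs; the function name and file paths below are illustrative, not part of the DLab codebase:

    # Sketch only: mirrors add_custom_spark_properties() after this fix,
    # with plain file I/O standing in for Fabric's sudo() shell calls.
    import ast
    import os

    def merge_spark_properties(defaults_path, template_path):
        # Preserve the commented DLab header lines from the template,
        # mirroring: sudo('cat ... | grep "^#"')
        with open(template_path) as f:
            header = [line for line in f if line.startswith('#')]

        # Custom properties arrive as a list of {'Classification': ...,
        # 'Properties': ...} dicts in the spark_configurations env var.
        configs = ast.literal_eval(os.environ['spark_configurations'])

        with open(defaults_path) as f:
            current = f.read().split('\n')

        merged = []
        for param in current:
            # Skip comment lines so the header is not merged as a property.
            if param.split(' ')[0] != '#':
                for config in configs:
                    if config['Classification'] == 'spark-defaults':
                        for prop in config['Properties']:
                            if prop == param.split(' ')[0]:
                                # A custom value overrides the existing one.
                                param = prop + ' ' + config['Properties'][prop]
                            else:
                                merged.append(prop + ' ' + config['Properties'][prop])
                merged.append(param)

        # Rewrite the file: header first, then de-duplicated properties.
        with open(defaults_path, 'w') as f:
            f.writelines(header)
            for prop in set(merged):
                f.write(prop.rstrip() + '\n')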
---
.../src/general/scripts/os/common_configure_spark.py | 20 +++++++++++---------
1 file changed, 11 insertions(+), 9 deletions(-)
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
index 0d104db..fa15496 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
@@ -53,21 +53,23 @@ def update_spark_defaults_conf(spark_conf):
 def add_custom_spark_properties(cluster_name):
     try:
+        dlab_header = sudo('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name))
         spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
         new_spark_defaults = list()
         spark_defaults = sudo('cat /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
         current_spark_properties = spark_defaults.split('\n')
         for param in current_spark_properties:
-            for config in spark_configurations:
-                if config['Classification'] == 'spark-defaults':
-                    for property in config['Properties']:
-                        if property == param.split(' ')[0]:
-                            param = property + ' ' + config['Properties'][property]
-                        else:
-                            new_spark_defaults.append(property + ' ' + config['Properties'][property])
-            new_spark_defaults.append(param)
+            if param.split(' ')[0] != '#':
+                for config in spark_configurations:
+                    if config['Classification'] == 'spark-defaults':
+                        for property in config['Properties']:
+                            if property == param.split(' ')[0]:
+                                param = property + ' ' + config['Properties'][property]
+                            else:
+                                new_spark_defaults.append(property + ' ' + config['Properties'][property])
+                new_spark_defaults.append(param)
         new_spark_defaults = set(new_spark_defaults)
-        sudo('echo "" > /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
+        sudo("echo '{0}' > /opt/{1}/spark/conf/spark-defaults.conf".format(dlab_header, cluster_name))
         for prop in new_spark_defaults:
             prop = prop.rstrip()
             sudo('echo "{0}" >> /opt/{1}/spark/conf/spark-defaults.conf'.format(prop, cluster_name))
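For illustration, a hypothetical invocation of the sketch above; the cluster name, paths, and the single custom property are assumptions, not taken from this commit:

    import os

    # Hypothetical custom configuration, in the format the script parses
    # from the spark_configurations environment variable.
    os.environ['spark_configurations'] = (
        "[{'Classification': 'spark-defaults',"
        " 'Properties': {'spark.executor.memory': '4g'}}]")

    merge_spark_properties('/opt/cluster1/spark/conf/spark-defaults.conf',
                           '/tmp/cluster1/notebook_spark-defaults_local.conf')
    # The rewritten file keeps its leading '#' header lines, followed by
    # the de-duplicated properties, e.g. 'spark.executor.memory 4g'.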
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@dlab.apache.org
For additional commands, e-mail: commits-help@dlab.apache.org