Posted to commits@dlab.apache.org by lf...@apache.org on 2020/09/04 06:59:00 UTC

[incubator-dlab] branch DLAB-1980 updated: [DLAB-1980]: added check if /opt/{cluster_name} exists during custom Spark creation

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DLAB-1980
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git


The following commit(s) were added to refs/heads/DLAB-1980 by this push:
     new b2044ca  [DLAB-1980]: added check if /opt/{cluster_name} exists during custom Spark creation
b2044ca is described below

commit b2044ca12086ae1afccb37ab6f94d5869c11fb59
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Fri Sep 4 09:58:08 2020 +0300

    [DLAB-1980]: added check if /opt/{cluster_name} exists during custom Spark creation
---
 .../general/scripts/os/common_configure_spark.py   | 43 +++++++++++-----------
 1 file changed, 22 insertions(+), 21 deletions(-)
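
In short, the commit wraps the whole property-merge routine in an
existence check on /opt/{cluster_name}. A minimal sketch of the pattern
(illustrative only, not the committed code; the elided body corresponds
to the merge logic shown in the diff below):

    import os
    import sys

    def add_custom_spark_properties(cluster_name):
        try:
            cluster_dir = '/opt/{0}'.format(cluster_name)
            if os.path.exists(cluster_dir):
                # ... merge custom properties into
                # /opt/<cluster_name>/spark/conf/spark-defaults.conf,
                # as the diff below does ...
                pass
            # If the directory is absent (custom Spark was never
            # installed for this cluster), the function now returns
            # quietly rather than erroring on the first
            # sudo('cat /opt/...') call against a missing path.
        except Exception as err:
            print('Error: {0}'.format(err))
            sys.exit(1)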

diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
index fa15496..dce2f5b 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
@@ -53,27 +53,28 @@ def update_spark_defaults_conf(spark_conf):
 
 def add_custom_spark_properties(cluster_name):
     try:
-        dlab_header = sudo('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name))
-        spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
-        new_spark_defaults = list()
-        spark_defaults = sudo('cat /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
-        current_spark_properties = spark_defaults.split('\n')
-        for param in current_spark_properties:
-            if param.split(' ')[0] != '#':
-                for config in spark_configurations:
-                    if config['Classification'] == 'spark-defaults':
-                        for property in config['Properties']:
-                            if property == param.split(' ')[0]:
-                                param = property + ' ' + config['Properties'][property]
-                            else:
-                                new_spark_defaults.append(property + ' ' + config['Properties'][property])
-                new_spark_defaults.append(param)
-        new_spark_defaults = set(new_spark_defaults)
-        sudo("echo '{0}' > /opt/{1}/spark/conf/spark-defaults.conf".format(dlab_header, cluster_name))
-        for prop in new_spark_defaults:
-            prop = prop.rstrip()
-            sudo('echo "{0}" >> /opt/{1}/spark/conf/spark-defaults.conf'.format(prop, cluster_name))
-        sudo('sed -i "/^\s*$/d" /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
+        if os.path.exists('/opt/{0}'.format(cluster_name)):
+            dlab_header = sudo('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name))
+            spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
+            new_spark_defaults = list()
+            spark_defaults = sudo('cat /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
+            current_spark_properties = spark_defaults.split('\n')
+            for param in current_spark_properties:
+                if param.split(' ')[0] != '#':
+                    for config in spark_configurations:
+                        if config['Classification'] == 'spark-defaults':
+                            for property in config['Properties']:
+                                if property == param.split(' ')[0]:
+                                    param = property + ' ' + config['Properties'][property]
+                                else:
+                                    new_spark_defaults.append(property + ' ' + config['Properties'][property])
+                    new_spark_defaults.append(param)
+            new_spark_defaults = set(new_spark_defaults)
+            sudo("echo '{0}' > /opt/{1}/spark/conf/spark-defaults.conf".format(dlab_header, cluster_name))
+            for prop in new_spark_defaults:
+                prop = prop.rstrip()
+                sudo('echo "{0}" >> /opt/{1}/spark/conf/spark-defaults.conf'.format(prop, cluster_name))
+            sudo('sed -i "/^\s*$/d" /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
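
For reference, the merge performed by the loops above can be written as
a small pure function that is easy to test locally. This is an
illustrative sketch, not the committed code: it collapses the nested
loops plus set() deduplication into a dict, and treats any line starting
with '#' as a comment (the script only skips lines whose first
space-separated token is exactly '#'):

    import ast
    import os

    def merge_spark_defaults(current_lines, spark_configurations):
        # Existing 'key value' pairs are kept unless a 'spark-defaults'
        # classification supplies the same key, which then overrides.
        merged = {}
        for line in current_lines:
            parts = line.split(' ', 1)
            if len(parts) == 2 and parts[0] and not parts[0].startswith('#'):
                merged[parts[0]] = parts[1].rstrip()
        for config in spark_configurations:
            if config.get('Classification') == 'spark-defaults':
                merged.update(config.get('Properties', {}))
        return ['{0} {1}'.format(key, value) for key, value in merged.items()]

    # spark_configurations normally arrives through the environment and
    # is parsed with ast.literal_eval, as in the script above:
    configs = ast.literal_eval(os.environ.get(
        'spark_configurations',
        "[{'Classification': 'spark-defaults',"
        " 'Properties': {'spark.executor.memory': '4g'}}]"))
    print(merge_spark_defaults(['spark.executor.memory 2g'], configs))

Modulo ordering, the result matches what the script writes back: the
committed loop may append the same property line several times and
relies on set() to drop the duplicates, while the dict keeps each key
exactly once.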

