Posted to commits@datalab.apache.org by lf...@apache.org on 2021/04/29 10:35:35 UTC

[incubator-datalab] branch DATALAB-2091 updated: [DATALAB-2091]: changed where install_sparkamagic_kernels function is located

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2091
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git


The following commit(s) were added to refs/heads/DATALAB-2091 by this push:
     new 257e4ad  [DATALAB-2091]: changed where install_sparkamagic_kernels function is located
257e4ad is described below

commit 257e4adc60e1be6edce7ab14ee20ee1a0459d89e
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Thu Apr 29 13:34:48 2021 +0300

    [DATALAB-2091]: changed where install_sparkamagic_kernels function is located
---
 .../jupyter_install_dataengine-service_kernels.py  | 44 ++++++++++++++++++----
 1 file changed, 37 insertions(+), 7 deletions(-)

diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
index df74c76..b098f4f 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
@@ -73,6 +73,34 @@ def configure_notebook(args):
         conn.sudo('mkdir -p /usr/lib64/python3.8')
         conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
+def install_sparkamagic_kernels(args):
+    try:
+        datalab.fab.conn.sudo('jupyter nbextension enable --py --sys-prefix widgetsnbextension')
+        sparkmagic_dir = datalab.fab.conn.sudo(''' bash -l -c 'pip3 show sparkmagic | grep "Location: "' ''').stdout.rstrip("\n\r").split(' ')[1]
+        datalab.fab.conn.sudo('jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --prefix=/home/{}/.local/'.format(sparkmagic_dir, args.os_user))
+        datalab.fab.conn.sudo('jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --prefix=/home/{}/.local/'.format(sparkmagic_dir, args.os_user))
+        datalab.fab.conn.sudo('jupyter-kernelspec install {}/sparkmagic/kernels/sparkrkernel --prefix=/home/{}/.local/'.format(sparkmagic_dir, args.os_user))
+        pyspark_kernel_name = 'PySpark (Python-{0} / Spark-{1} ) [{2}]'.format(args.python_version, args.spark_version,
+                                                                         args.cluster_name)
+        datalab.fab.conn.sudo('sed -i \'s|PySpark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/pysparkkernel/kernel.json'.format(
+            pyspark_kernel_name, args.os_user))
+        spark_kernel_name = 'Spark (Scala-{0} / Spark-{1} ) [{2}]'.format(args.scala_version, args.spark_version,
+                                                                         args.cluster_name)
+        datalab.fab.conn.sudo('sed -i \'s|Spark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkkernel/kernel.json'.format(
+            spark_kernel_name, args.os_user))
+        sparkr_kernel_name = 'SparkR (R-{0} / Spark-{1} ) [{2}]'.format(args.r_version, args.spark_version,
+                                                                            args.cluster_name)
+        datalab.fab.conn.sudo('sed -i \'s|SparkR|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkrkernel/kernel.json'.format(
+            sparkr_kernel_name, args.os_user))
+        datalab.fab.conn.sudo('mkdir -p /home/' + args.os_user + '/.sparkmagic')
+        datalab.fab.conn.sudo('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json')
+        datalab.fab.conn.sudo('sed -i \'s|LIVY_HOST|{0}|g\' /home/{1}/.sparkmagic/config.json'.format(
+                args.master_ip, args.os_user))
+        datalab.fab.conn.sudo('chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user))
+    except:
+        traceback.print_exc()
+        sys.exit(1)
+
 
 if __name__ == "__main__":
     global conn
@@ -103,10 +131,12 @@ if __name__ == "__main__":
     cluster_id = get_emr_id_by_name(args.cluster_name)
     master_instances = get_emr_instances_list(cluster_id, 'MASTER')
     master_ip = master_instances[0].get('PrivateIpAddress')
-    conn.sudo("/usr/bin/python3 /usr/local/bin/jupyter_dataengine-service_create_configs.py --bucket " + args.bucket
-         + " --cluster_name " + args.cluster_name + " --emr_version " + args.emr_version + " --spark_version "
-         + spark_version + " --scala_version " + scala_version + " --r_version " + r_version + " --hadoop_version "
-         + hadoop_version + " --region " + args.region + " --excluded_lines '" + args.emr_excluded_spark_properties
-         + "' --project_name " + args.project_name + " --os_user " + args.os_user + " --pip_mirror "
-         + args.pip_mirror + " --numpy_version " + numpy_version + " --application "
-         + args.application + " --master_ip " + master_ip + " --python_version " + python_version)
+    #conn.sudo("/usr/bin/python3 /usr/local/bin/jupyter_dataengine-service_create_configs.py --bucket " + args.bucket
+    #     + " --cluster_name " + args.cluster_name + " --emr_version " + args.emr_version + " --spark_version "
+    #     + spark_version + " --scala_version " + scala_version + " --r_version " + r_version + " --hadoop_version "
+    #     + hadoop_version + " --region " + args.region + " --excluded_lines '" + args.emr_excluded_spark_properties
+    #     + "' --project_name " + args.project_name + " --os_user " + args.os_user + " --pip_mirror "
+    #     + args.pip_mirror + " --numpy_version " + numpy_version + " --application "
+    #     + args.application + " --master_ip " + master_ip + " --python_version " + python_version)
+
+    install_sparkamagic_kernels(args)
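
For readers skimming the diff, here is a minimal standalone sketch of the flow the new install_sparkamagic_kernels() function implements: locate sparkmagic via `pip3 show`, register its three kernelspecs under the notebook user's home, rewrite their display names to carry the Python/Scala/R and Spark versions plus the cluster name, and point ~/.sparkmagic/config.json at Livy on the EMR master. This is a sketch, not the project's code: it swaps the script's Fabric conn.sudo(...) calls for local subprocess calls and edits kernel.json with the json module instead of sed. OS_USER, MASTER_IP, CLUSTER_NAME, and all version strings below are hypothetical placeholders, not values taken from the commit.

import json
import subprocess
from pathlib import Path

OS_USER = "datalab-user"    # hypothetical; the script receives this as --os_user
MASTER_IP = "10.0.0.10"     # hypothetical EMR master private IP (--master_ip)
CLUSTER_NAME = "demo-cluster"

def sparkmagic_location():
    # Same trick as the script: parse the "Location:" line of `pip3 show sparkmagic`.
    out = subprocess.run(["pip3", "show", "sparkmagic"],
                         capture_output=True, text=True, check=True).stdout
    for line in out.splitlines():
        if line.startswith("Location:"):
            return line.split(" ", 1)[1].strip()
    raise RuntimeError("sparkmagic is not installed")

def install_kernelspecs():
    # Counterpart of the three `jupyter-kernelspec install ... --prefix=...` calls.
    src = sparkmagic_location()
    for kernel in ("sparkkernel", "pysparkkernel", "sparkrkernel"):
        subprocess.run(["jupyter-kernelspec", "install",
                        "{}/sparkmagic/kernels/{}".format(src, kernel),
                        "--prefix=/home/{}/.local/".format(OS_USER)],
                       check=True)

def rename_kernel(kernel, display_name):
    # The commit does this with sed over the whole file; setting display_name
    # via the json module touches only the field the name actually lives in.
    path = Path("/home/{}/.local/share/jupyter/kernels/{}/kernel.json"
                .format(OS_USER, kernel))
    spec = json.loads(path.read_text())
    spec["display_name"] = display_name
    path.write_text(json.dumps(spec, indent=2))

def write_sparkmagic_config():
    # Counterpart of copying /tmp/sparkmagic_config_template.json into place
    # and sed-replacing the LIVY_HOST placeholder with the EMR master's IP.
    template = Path("/tmp/sparkmagic_config_template.json").read_text()
    cfg_dir = Path("/home/{}/.sparkmagic".format(OS_USER))
    cfg_dir.mkdir(parents=True, exist_ok=True)
    (cfg_dir / "config.json").write_text(template.replace("LIVY_HOST", MASTER_IP))

if __name__ == "__main__":
    install_kernelspecs()
    # Version numbers here are illustrative only.
    rename_kernel("pysparkkernel", "PySpark (Python-3.8 / Spark-2.4.4) [{}]".format(CLUSTER_NAME))
    rename_kernel("sparkkernel", "Spark (Scala-2.11 / Spark-2.4.4) [{}]".format(CLUSTER_NAME))
    rename_kernel("sparkrkernel", "SparkR (R-3.6 / Spark-2.4.4) [{}]".format(CLUSTER_NAME))
    write_sparkmagic_config()

Two small observations on the committed version: the display-name format strings carry a stray space before the closing parenthesis ("Spark-{1} )"), and the bare except: also traps SystemExit and KeyboardInterrupt, where except Exception: would be the narrower idiom. Neither affects the happy path, and the old jupyter_dataengine-service_create_configs.py invocation is left commented out rather than deleted.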
