Posted to commits@datalab.apache.org by my...@apache.org on 2021/04/30 10:44:59 UTC

[incubator-datalab] branch DATALAB-2372 updated: [DataLab-2372] - [AWS][Azure] Deeplearning notebook updated for using aws predefined image

This is an automated email from the ASF dual-hosted git repository.

mykolabodnar pushed a commit to branch DATALAB-2372
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git


The following commit(s) were added to refs/heads/DATALAB-2372 by this push:
     new 1d85ccc  [DataLab-2372] - [AWS][Azure] Deeplearning notebook updated for using aws predefined image
1d85ccc is described below

commit 1d85ccc92d581c2c6128c856ad892613d78b6087
Author: bodnarmykola <bo...@gmail.com>
AuthorDate: Fri Apr 30 13:44:41 2021 +0300

    [DataLab-2372] - [AWS][Azure] Deeplearning notebook updated for using aws predefined image
---
 .../src/deeplearning/scripts/configure_deep_learning_node.py        | 4 ++--
 infrastructure-provisioning/src/general/conf/datalab.ini            | 4 ++--
 .../src/general/files/aws/deeplearning_description.json             | 4 ++--
 .../src/general/files/azure/deeplearning_description.json           | 6 +++---
 infrastructure-provisioning/src/general/lib/os/fab.py               | 6 +++---
 .../src/general/scripts/aws/common_prepare_notebook.py              | 4 ++--
 6 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
index 39642f0..9678bc8 100644
--- a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
+++ b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
@@ -106,7 +106,7 @@ if __name__ == "__main__":
     print("Mount additional volume")
     prepare_disk(args.os_user)
 
-    if not os.environ['aws_deeplearning_image_name']:
+    if os.environ['conf_deeplearning_cloud_ami'] == 'false':
         # INSTALL LANGUAGES
         print("Install Java")
         ensure_jre_jdk(args.os_user)
@@ -156,7 +156,7 @@ if __name__ == "__main__":
         ensure_additional_python_libs(args.os_user)
         print("Install Matplotlib")
         ensure_matplot(args.os_user)
-    else:
+    elif os.environ['conf_deeplearning_cloud_ami'] == 'true':
         # CONFIGURE JUPYTER NOTEBOOK
         print("Configure Jupyter")
         configure_jupyter(args.os_user, jupyter_conf_file, templates_dir, args.jupyter_version, args.exploratory_name)
diff --git a/infrastructure-provisioning/src/general/conf/datalab.ini b/infrastructure-provisioning/src/general/conf/datalab.ini
index 480c6c9..b92b732 100644
--- a/infrastructure-provisioning/src/general/conf/datalab.ini
+++ b/infrastructure-provisioning/src/general/conf/datalab.ini
@@ -91,6 +91,8 @@ repository_user =
 repository_pass =
 ### Repository url
 repository_address =
+### Deeplearning native cloud AMI enabled
+deeplearning_cloud_ami = true
 
 #--- [aws] section contains all common parameters related to Amazon ---#
 [aws]
@@ -126,8 +128,6 @@ region = us-west-2
 debian_image_name = ubuntu/images/hvm-ssd/ubuntu-focal-20.04-amd64-server-20201026
 ### Amazon ami name based on RedHat conf_os_family for all DataLab instances
 redhat_image_name = RHEL-7.4_HVM-20180103-x86_64-2-Hourly2-GP2
-### Amazon ami name for Deeplearning notebook
-deeplearning_image_name = Deep Learning AMI (Ubuntu 18.04) Version 42.1
 ### Amazon account ID
 # account_id =
 ### Amazon billing bucket
diff --git a/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json b/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json
index 5339445..a0519a8 100644
--- a/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json
+++ b/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json
@@ -8,10 +8,10 @@
   "exploratory_environment_versions" :
   [
     {
-      "template_name": "AWS Deep Learning  42.1",
+      "template_name": "Deep Learning AMI (Ubuntu 18.04) Version 42.1",
       "description": "MXNet-1.8.0 & 1.7.0, TensorFlow-2.4.1, 2.1.3 & 1.15.5, PyTorch-1.4.0 & 1.8.0, Neuron, & others. NVIDIA CUDA, cuDNN, NCCL, Intel MKL-DNN, Docker, NVIDIA-Docker & EFA support. Uses Anaconda virtual environments, configured to keep the different framework installations separate and easy to switch between frameworks as Jupyter kernels.",
       "environment_type": "exploratory",
-      "version": "deeplearning-42.1",
+      "version": "Deep Learning AMI (Ubuntu 18.04) Version 42.1",
       "vendor": "AWS"
     }
   ]
diff --git a/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json b/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json
index 9094ca0..c0db4b8 100644
--- a/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json
+++ b/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json
@@ -8,10 +8,10 @@
   "exploratory_environment_versions" :
   [
     {
-      "template_name": "Deep Learning  2.4",
-      "description": "Base image with Deep Learning and Jupyter",
+      "template_name": "Data Science Virtual Machine - Ubuntu 18.04",
+      "description": "Pre-configured with NVIDIA drivers, CUDA Toolkit, and cuDNN library for GPU workloads with the following highlights: Jupyter, JupyterLab, and JupyterHub; Deep learning with TensorFlow and PyTorch; Machine learning with xgboost, Vowpal Wabbit, and LightGBM; Julia; Azure SDKs and libraries; Azure Machine Learning SDKs and sample notebooks; R support; Spark. Uses Anaconda virtual environments, configured to keep the different framework installations separate and easy t [...]
       "environment_type": "exploratory",
-      "version": "deeplearning-2.4",
+      "version": "microsoft-dsvm:ubuntu-1804:1804-gen2",
       "vendor": "Azure"
     }
   ]
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index 18d3818..89e1ba0 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -191,7 +191,7 @@ def put_resource_status(resource, status, datalab_path, os_user, hostname):
 def configure_jupyter(os_user, jupyter_conf_file, templates_dir, jupyter_version, exploratory_name):
     if not exists(conn,'/home/' + os_user + '/.ensure_dir/jupyter_ensured'):
         try:
-            if not os.environ['aws_deeplearning_image_name']:
+            if os.environ['conf_deeplearning_cloud_ami'] == 'false' or os.environ['application'] != 'deeplearning':
                 conn.sudo('pip3 install notebook=={} --no-cache-dir'.format(jupyter_version))
                 conn.sudo('pip3 install jupyter --no-cache-dir')
                 conn.sudo('rm -rf {}'.format(jupyter_conf_file))
@@ -204,7 +204,7 @@ def configure_jupyter(os_user, jupyter_conf_file, templates_dir, jupyter_version
             conn.sudo('echo \'c.NotebookApp.cookie_secret = b"{0}"\' >> {1}'.format(id_generator(), jupyter_conf_file))
             conn.sudo('''echo "c.NotebookApp.token = u''" >> {}'''.format(jupyter_conf_file))
             conn.sudo('echo \'c.KernelSpecManager.ensure_native_kernel = False\' >> {}'.format(jupyter_conf_file))
-            if os.environ['aws_deeplearning_image_name']:
+            if os.environ['conf_deeplearning_cloud_ami'] == 'true':
                 conn.sudo(
                     '''echo "c.NotebookApp.kernel_spec_manager_class = 'environment_kernels.EnvironmentKernelSpecManager'" >> {}'''.format(
                         jupyter_conf_file))
@@ -215,7 +215,7 @@ def configure_jupyter(os_user, jupyter_conf_file, templates_dir, jupyter_version
             conn.sudo("chmod 644 /tmp/jupyter-notebook.service")
             if os.environ['application'] == 'tensor':
                 conn.sudo("sed -i '/ExecStart/s|-c \"|-c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; |g' /tmp/jupyter-notebook.service")
-            elif os.environ['application'] == 'deeplearning' and not os.environ['aws_deeplearning_image_name']:
+            elif os.environ['application'] == 'deeplearning' and os.environ['conf_deeplearning_cloud_ami'] == 'false':
                 conn.sudo("sed -i '/ExecStart/s|-c \"|-c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64:/usr/lib64/openmpi/lib: ; export PYTHONPATH=/home/" + os_user +
                      "/caffe/python:/home/" + os_user + "/pytorch/build:$PYTHONPATH ; |g' /tmp/jupyter-notebook.service")
             conn.sudo("sed -i 's|CONF_PATH|{}|' /tmp/jupyter-notebook.service".format(jupyter_conf_file))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
index 17fde95..270ae56 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
@@ -104,8 +104,8 @@ if __name__ == "__main__":
         notebook_config['ami_id'] = datalab.meta_lib.get_ami_id(os.environ['aws_{}_image_name'.format(
             os.environ['conf_os_family'])])
         image_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['notebook_image_name'], 'available')
-        if os.environ['aws_deeplearning_image_name'] and os.environ['application'] == 'deeplearning':
-            image_id = datalab.meta_lib.get_ami_id(os.environ['aws_deeplearning_image_name'])
+        if os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['application'] == 'deeplearning' and image_id == '':
+            image_id = datalab.meta_lib.get_ami_id(notebook_config['notebook_image_name'])
         if image_id != '':
             notebook_config['ami_id'] = image_id
             print('Pre-configured image found. Using: {}'.format(notebook_config['ami_id']))
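
For reference, a minimal consolidated sketch of the AMI lookup order that common_prepare_notebook.py follows after this change; resolve_notebook_ami is a hypothetical wrapper name, and the datalab.meta_lib helpers are assumed to behave as shown in the hunk above, with an empty string meaning "no image found":

import os
import datalab.meta_lib

def resolve_notebook_ami(notebook_config):
    # Default: the base OS image configured for all DataLab instances.
    ami_id = datalab.meta_lib.get_ami_id(
        os.environ['aws_{}_image_name'.format(os.environ['conf_os_family'])])
    # A user-created custom notebook image takes precedence when available.
    image_id = datalab.meta_lib.get_ami_id_by_name(
        notebook_config['notebook_image_name'], 'available')
    # New fallback: for Deep Learning notebooks with the cloud AMI enabled,
    # resolve the predefined cloud image when no custom image was found.
    if (os.environ['conf_deeplearning_cloud_ami'] == 'true'
            and os.environ['application'] == 'deeplearning'
            and image_id == ''):
        image_id = datalab.meta_lib.get_ami_id(notebook_config['notebook_image_name'])
    if image_id != '':
        ami_id = image_id
        print('Pre-configured image found. Using: {}'.format(ami_id))
    return ami_id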

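Likewise, a condensed sketch of how the new conf_deeplearning_cloud_ami flag (added to datalab.ini, defaulting to true) gates the Deep Learning provisioning path in configure_deep_learning_node.py and fab.py; install_frameworks_from_scratch and configure_jupyter_for_image are hypothetical stand-ins for the ensure_*/configure_jupyter calls shown in the hunks above:

import os

def install_frameworks_from_scratch(os_user):
    # Stand-in for the ensure_jre_jdk / ensure_additional_python_libs /
    # ensure_matplot chain that runs only on a plain OS image.
    print('Installing Java, TensorFlow, PyTorch and extra libs for {}'.format(os_user))

def configure_jupyter_for_image(os_user, use_cloud_ami):
    # Stand-in for configure_jupyter(); with the cloud AMI, Jupyter is not
    # pip-installed and the image's Anaconda environments are exposed as
    # kernels via environment_kernels.EnvironmentKernelSpecManager.
    print('Configuring Jupyter for {} (cloud AMI: {})'.format(os_user, use_cloud_ami))

def provision_deep_learning(os_user):
    # The flag arrives as a string ('true'/'false'), matching how datalab.ini
    # options are exported into the environment.
    use_cloud_ami = os.environ.get('conf_deeplearning_cloud_ami', 'true') == 'true'
    if not use_cloud_ami:
        install_frameworks_from_scratch(os_user)
    configure_jupyter_for_image(os_user, use_cloud_ami)
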
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org