Posted to commits@datalab.apache.org by lf...@apache.org on 2021/04/19 14:03:46 UTC

[incubator-datalab] branch DATALAB-2091 updated (abcf565 -> ce9908a)

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a change to branch DATALAB-2091
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git.


    from abcf565  [DATALAB-2091]: tmp use python2 instead of python3
     new f743296  [DATALAB-2091]: fixed some emr deployment errors
     new ce9908a  [DATALAB-2091]: changed connections to python3

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../src/general/lib/aws/meta_lib.py                |   3 +-
 .../src/general/lib/azure/actions_lib.py           |   8 +-
 .../src/general/lib/os/redhat/notebook_lib.py      | 356 ++++++++++-----------
 .../scripts/aws/dataengine-service_configure.py    |   3 +-
 .../jupyter_install_dataengine-service_kernels.py  |   8 +-
 .../rstudio_install_dataengine-service_kernels.py  |   8 +-
 .../zeppelin_install_dataengine-service_kernels.py |   8 +-
 .../general/scripts/azure/common_start_notebook.py |   7 +-
 .../rstudio_install_dataengine-service_kernels.py  |   8 +-
 .../zeppelin_install_dataengine-service_kernels.py |   8 +-
 .../general/scripts/os/common_configure_proxy.py   |   3 +-
 .../os/deeplearning_install_dataengine_kernels.py  |  10 +-
 .../os/rstudio_install_dataengine_kernels.py       |  10 +-
 .../tensor-rstudio_install_dataengine_kernels.py   |  10 +-
 .../os/tensor_install_dataengine_kernels.py        |  10 +-
 .../os/zeppelin_install_dataengine_kernels.py      |  10 +-
 16 files changed, 223 insertions(+), 247 deletions(-)

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org


[incubator-datalab] 02/02: [DATALAB-2091]: changed connections to python3

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2091
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit ce9908ab836f15726587d1b3e353a56166a50a27
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Apr 19 17:03:27 2021 +0300

    [DATALAB-2091]: changed connections to python3
---
 .../src/general/lib/azure/actions_lib.py                       |  8 +++-----
 .../scripts/aws/jupyter_install_dataengine-service_kernels.py  |  8 +++-----
 .../scripts/aws/rstudio_install_dataengine-service_kernels.py  |  8 +++-----
 .../scripts/aws/zeppelin_install_dataengine-service_kernels.py |  8 +++-----
 .../src/general/scripts/azure/common_start_notebook.py         |  7 +++----
 .../scripts/gcp/rstudio_install_dataengine-service_kernels.py  |  8 +++-----
 .../scripts/gcp/zeppelin_install_dataengine-service_kernels.py |  8 +++-----
 .../scripts/os/deeplearning_install_dataengine_kernels.py      | 10 ++++------
 .../general/scripts/os/rstudio_install_dataengine_kernels.py   | 10 ++++------
 .../scripts/os/tensor-rstudio_install_dataengine_kernels.py    | 10 ++++------
 .../general/scripts/os/tensor_install_dataengine_kernels.py    | 10 ++++------
 .../general/scripts/os/zeppelin_install_dataengine_kernels.py  | 10 ++++------
 12 files changed, 41 insertions(+), 64 deletions(-)

diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index 47d39cb..df37644 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -970,10 +970,8 @@ class AzureActions:
     def remove_dataengine_kernels(self, resource_group_name, notebook_name, os_user, key_path, cluster_name):
         try:
             private = meta_lib.AzureMeta().get_private_ip_address(resource_group_name, notebook_name)
-            env.hosts = "{}".format(private)
-            env.user = "{}".format(os_user)
-            env.key_filename = "{}".format(key_path)
-            env.host_string = env.user + "@" + env.hosts
+            global conn
+            conn = datalab.fab.init_datalab_connection(private, os_user, key_path)
             conn.sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(os_user, cluster_name))
             if exists(conn, '/home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name)):
                 if os.environ['notebook_multiple_clusters'] == 'true':
@@ -1019,7 +1017,7 @@ class AzureActions:
             if exists(conn, '/home/{}/.ensure_dir/rstudio_dataengine_ensured'.format(os_user)):
                 datalab.fab.remove_rstudio_dataengines_kernel(os.environ['computational_name'], os_user)
             conn.sudo('rm -rf  /opt/' + cluster_name + '/')
-            print("Notebook's {} kernels were removed".format(env.hosts))
+            print("Notebook's {} kernels were removed".format(private))
         except Exception as err:
             logging.info("Unable to remove kernels on Notebook: " + str(err) + "\n Traceback: " + traceback.print_exc(
                 file=sys.stdout))
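
The pattern in this hunk repeats through the rest of the commit: the Fabric 1.x
implicit globals (env.hosts, env.user, env.key_filename, env.host_string) are
replaced with an explicit Fabric 2.x connection object obtained from
datalab.fab.init_datalab_connection. The helper itself is not part of this
diff; a minimal sketch of what it plausibly wraps, assuming it is a thin layer
over fabric.Connection with key-based auth:

    # Sketch only -- the real helper lives in datalab.fab and may differ.
    from fabric import Connection

    def init_datalab_connection(host, user, key_path):
        # One Connection object replaces the Fabric 1.x env.hosts /
        # env.user / env.key_filename / env.host_string globals.
        return Connection(host=host, user=user,
                          connect_kwargs={'key_filename': key_path})

Calls such as conn.sudo(...), conn.run(...) and conn.put(...) then operate on
that single connection instead of Fabric 1.x module-level state.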
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
index de508f9..2b985b7 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
@@ -65,7 +65,7 @@ def configure_notebook(args):
     conn.sudo('chmod 755 /usr/local/bin/jupyter_dataengine-service_create_configs.py')
     conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
     conn.run('mkdir -p /tmp/datalab_libs/')
-    subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string), shell=True, check=True)
+    subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, args.notebook_ip), shell=True, check=True)
     conn.run('chmod a+x /tmp/datalab_libs/*')
     conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists(conn, '/usr/lib64'):
@@ -74,10 +74,8 @@ def configure_notebook(args):
 
 
 if __name__ == "__main__":
-    env.hosts = "{}".format(args.notebook_ip)
-    env.user = args.os_user
-    env.key_filename = "{}".format(args.keyfile)
-    env.host_string = env.user + "@" + env.hosts
+    global conn
+    conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
     configure_notebook(args)
     spark_version = get_spark_version(args.cluster_name)
     hadoop_version = get_hadoop_version(args.cluster_name)
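
One behavioral detail in the hunk above: env.host_string held a user@host
pair, while args.notebook_ip is a bare address, so the scp destination now
depends on the default SSH user (or a matching entry in the SSH client
config). If the remote user had to stay explicit, a hedged variant (assuming
args still carries os_user, as it does elsewhere in these scripts) would
rebuild the pair:

    # Hypothetical: keep the user explicit so scp does not fall back to
    # the local username.
    host_string = '{}@{}'.format(args.os_user, args.notebook_ip)
    subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'
                   .format(args.keyfile, host_string), shell=True, check=True)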
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py
index 27886ca..53e5b5f 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py
@@ -53,7 +53,7 @@ def configure_notebook(args):
     conn.sudo('chmod 755 /usr/local/bin/rstudio_dataengine-service_create_configs.py')
     conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
     conn.run('mkdir -p /tmp/datalab_libs/')
-    conn.local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
+    conn.local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, args.notebook_ip))
     conn.run('chmod a+x /tmp/datalab_libs/*')
     conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists(conn, '/usr/lib64'):
@@ -62,10 +62,8 @@ def configure_notebook(args):
 
 
 if __name__ == "__main__":
-    env.hosts = "{}".format(args.notebook_ip)
-    env.user = args.os_user
-    env.key_filename = "{}".format(args.keyfile)
-    env.host_string = env.user + "@" + env.hosts
+    global conn
+    conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
     configure_notebook(args)
     spark_version = get_spark_version(args.cluster_name)
     hadoop_version = get_hadoop_version(args.cluster_name)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py
index a86e622..5fc1377 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py
@@ -59,7 +59,7 @@ def configure_notebook(args):
     conn.sudo('chmod 755 /usr/local/bin/zeppelin_dataengine-service_create_configs.py')
     conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
     conn.run('mkdir -p /tmp/datalab_libs/')
-    conn.local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
+    conn.local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, args.notebook_ip))
     conn.run('chmod a+x /tmp/datalab_libs/*')
     conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists(conn, '/usr/lib64'):
@@ -68,10 +68,8 @@ def configure_notebook(args):
 
 
 if __name__ == "__main__":
-    env.hosts = "{}".format(args.notebook_ip)
-    env.user = args.os_user
-    env.key_filename = "{}".format(args.keyfile)
-    env.host_string = env.user + "@" + env.hosts
+    global conn
+    conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
     configure_notebook(args)
     spark_version = get_spark_version(args.cluster_name)
     hadoop_version = get_hadoop_version(args.cluster_name)
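
A note on the "global conn" lines in these __main__ blocks: at module top
level the global keyword is a no-op, because module scope already is the
global scope. It only has an effect when the name is assigned from inside a
function:

    conn = None  # module-level name shared by the helper functions

    def open_connection(host, user, key_path):
        global conn  # needed here: rebinds the module-level name
        conn = datalab.fab.init_datalab_connection(host, user, key_path)

Under if __name__ == "__main__": the plain assignment alone already binds the
module global that configure_notebook and the other helpers read.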
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
index e5f3e99..af27198 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
@@ -85,10 +85,9 @@ if __name__ == "__main__":
             print('[UPDATE STORAGE CREDENTIALS]')
             notebook_config['notebook_ip'] = AzureMeta.get_private_ip_address(
                 notebook_config['resource_group_name'], notebook_config['notebook_name'])
-            env.hosts = "{}".format(notebook_config['notebook_ip'])
-            env.user = os.environ['conf_os_user']
-            env.key_filename = "{}".format(notebook_config['keyfile'])
-            env.host_string = env.user + "@" + env.hosts
+            global conn
+            conn = datalab.fab.init_datalab_connection(notebook_config['notebook_ip'], os.environ['conf_os_user'],
+                                                       notebook_config['keyfile'])
             params = '--refresh_token {}'.format(os.environ['azure_user_refresh_token'])
             try:
                 conn.put('~/scripts/common_notebook_update_refresh_token.py', '/tmp/common_notebook_update_refresh_token.py')
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py
index 29c799b..d5119bf 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py
@@ -54,7 +54,7 @@ def configure_notebook(args):
     conn.sudo('chmod 755 /usr/local/bin/create_configs.py')
     conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
     conn.run('mkdir -p /tmp/datalab_libs/')
-    subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string), shell=True, check=True)
+    subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, args.notebook_ip), shell=True, check=True)
     conn.run('chmod a+x /tmp/datalab_libs/*')
     conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists(conn, '/usr/lib64'):
@@ -63,10 +63,8 @@ def configure_notebook(args):
 
 
 if __name__ == "__main__":
-    env.hosts = "{}".format(args.notebook_ip)
-    env.user = args.os_user
-    env.key_filename = "{}".format(args.keyfile)
-    env.host_string = env.user + "@" + env.hosts
+    global conn
+    conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
     configure_notebook(args)
     spark_version = datalab.actions_lib.GCPActions().get_cluster_app_version(args.bucket, args.project_name, args.cluster_name, 'spark')
     hadoop_version = datalab.actions_lib.GCPActions().get_cluster_app_version(args.bucket, args.project_name, args.cluster_name, 'hadoop')
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_install_dataengine-service_kernels.py
index e456bd0..6170589 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_install_dataengine-service_kernels.py
@@ -58,7 +58,7 @@ def configure_notebook(args):
     conn.sudo('chmod 755 /usr/local/bin/create_configs.py')
     conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
     conn.run('mkdir -p /tmp/datalab_libs/')
-    conn.local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
+    conn.local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, args.notebook_ip))
     conn.run('chmod a+x /tmp/datalab_libs/*')
     conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists(conn, '/usr/lib64'):
@@ -67,10 +67,8 @@ def configure_notebook(args):
 
 
 if __name__ == "__main__":
-    env.hosts = "{}".format(args.notebook_ip)
-    env.user = args.os_user
-    env.key_filename = "{}".format(args.keyfile)
-    env.host_string = env.user + "@" + env.hosts
+    global conn
+    conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
     configure_notebook(args)
     r_enabled = os.environ['notebook_r_enabled']
     spark_version = datalab.actions_lib.GCPActions().get_cluster_app_version(args.bucket, args.project_name, args.cluster_name, 'spark')
diff --git a/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
index 54a335b..02ac3ee 100644
--- a/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
@@ -73,16 +73,14 @@ def create_inactivity_log(master_ip, hoststring):
     conn.sudo('''bash -l -c "date +%s > /opt/inactivity/{}_inactivity" '''.format(reworked_ip))
 
 if __name__ == "__main__":
-    env.hosts = "{}".format(args.notebook_ip)
-    env.user = args.os_user
-    env.key_filename = "{}".format(args.keyfile)
-    env.host_string = env.user + "@" + env.hosts
+    global conn
+    conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
     try:
         region = os.environ['aws_region']
     except:
         region = ''
-    configure_notebook(args.keyfile, env.host_string)
-    create_inactivity_log(args.spark_master_ip, env.host_string)
+    configure_notebook(args.keyfile, args.notebook_ip)
+    create_inactivity_log(args.spark_master_ip, args.notebook_ip)
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
     conn.sudo('/usr/bin/python3 /usr/local/bin/deeplearning_dataengine_create_configs.py '
diff --git a/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
index c29703d..e5c8201 100644
--- a/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
@@ -70,18 +70,16 @@ def create_inactivity_log(master_ip, hoststring):
     conn.sudo('''bash -l -c "date +%s > /opt/inactivity/{}_inactivity" '''.format(reworked_ip))
 
 if __name__ == "__main__":
-    env.hosts = "{}".format(args.notebook_ip)
-    env.user = args.os_user
-    env.key_filename = "{}".format(args.keyfile)
-    env.host_string = env.user + "@" + env.hosts
+    global conn
+    conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
     try:
         region = os.environ['aws_region']
     except:
         region = ''
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
-    configure_notebook(args.keyfile, env.host_string)
-    create_inactivity_log(args.spark_master_ip, env.host_string)
+    configure_notebook(args.keyfile, args.notebook_ip)
+    create_inactivity_log(args.spark_master_ip, args.notebook_ip)
     conn.sudo('/usr/bin/python3 /usr/local/bin/rstudio_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
          '--datalake_enabled {} --spark_configurations "{}"'.
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
index da67316..91123f8 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
@@ -72,18 +72,16 @@ def create_inactivity_log(master_ip, hoststring):
     conn.sudo('''bash -l -c "date +%s > /opt/inactivity/{}_inactivity" '''.format(reworked_ip))
 
 if __name__ == "__main__":
-    env.hosts = "{}".format(args.notebook_ip)
-    env.user = args.os_user
-    env.key_filename = "{}".format(args.keyfile)
-    env.host_string = env.user + "@" + env.hosts
+    global conn
+    conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
     try:
         region = os.environ['aws_region']
     except:
         region = ''
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
-    configure_notebook(args.keyfile, env.host_string)
-    create_inactivity_log(args.spark_master_ip, env.host_string)
+    configure_notebook(args.keyfile, args.notebook_ip)
+    create_inactivity_log(args.spark_master_ip, args.notebook_ip)
     conn.sudo('/usr/bin/python3 /usr/local/bin/tensor-rstudio_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
          '--datalake_enabled {} --spark_configurations "{}"'.
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
index d706050..1c9552d 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
@@ -70,18 +70,16 @@ def create_inactivity_log(master_ip, hoststring):
     conn.sudo('''bash -l -c "date +%s > /opt/inactivity/{}_inactivity" '''.format(reworked_ip))
 
 if __name__ == "__main__":
-    env.hosts = "{}".format(args.notebook_ip)
-    env.user = args.os_user
-    env.key_filename = "{}".format(args.keyfile)
-    env.host_string = env.user + "@" + env.hosts
+    global conn
+    conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
     try:
         region = os.environ['aws_region']
     except:
         region = ''
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
-    configure_notebook(args.keyfile, env.host_string)
-    create_inactivity_log(args.spark_master_ip, env.host_string)
+    configure_notebook(args.keyfile, args.notebook_ip)
+    create_inactivity_log(args.spark_master_ip, args.notebook_ip)
     conn.sudo('/usr/bin/python3 /usr/local/bin/tensor_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
          '--datalake_enabled {} --spark_configurations "{}"'.
diff --git a/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
index a791dae..f0e7201 100644
--- a/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
@@ -77,18 +77,16 @@ def create_inactivity_log(master_ip, hoststring):
     conn.sudo('''bash -l -c "date +%s > /opt/inactivity/{}_inactivity" '''.format(reworked_ip))
 
 if __name__ == "__main__":
-    env.hosts = "{}".format(args.notebook_ip)
-    env.user = args.os_user
-    env.key_filename = "{}".format(args.keyfile)
-    env.host_string = env.user + "@" + env.hosts
+    global conn
+    conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
     try:
         region = os.environ['aws_region']
     except:
         region = ''
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
-    configure_notebook(args.keyfile, env.host_string)
-    create_inactivity_log(args.spark_master_ip, env.host_string)
+    configure_notebook(args.keyfile, args.notebook_ip)
+    create_inactivity_log(args.spark_master_ip, args.notebook_ip)
     livy_version = os.environ['notebook_livy_version']
     r_enabled = os.environ['notebook_r_enabled']
     conn.sudo('/usr/bin/python3 /usr/local/bin/zeppelin_dataengine_create_configs.py '

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org


[incubator-datalab] 01/02: [DATALAB-2091]: fixed some emr deployment errors

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2091
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit f74329674a4b70a41f7c18144fce016c44ff69f9
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Apr 19 16:51:05 2021 +0300

    [DATALAB-2091]: fixed some emr deployment errors
---
 .../src/general/lib/aws/meta_lib.py                |   3 +-
 .../src/general/lib/os/redhat/notebook_lib.py      | 356 ++++++++++-----------
 .../scripts/aws/dataengine-service_configure.py    |   3 +-
 .../general/scripts/os/common_configure_proxy.py   |   3 +-
 4 files changed, 182 insertions(+), 183 deletions(-)

diff --git a/infrastructure-provisioning/src/general/lib/aws/meta_lib.py b/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
index 07de8b0..e959d83 100644
--- a/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
@@ -678,8 +678,7 @@ def check_security_group(security_group_name, count=0):
 
 def emr_waiter(tag_name, tag_value):
     if len(get_emr_list(tag_value, 'Value', False, True)) > 0 or os.path.exists('/response/.emr_creating_' + os.environ['exploratory_name']) or get_not_configured_emr(tag_name, tag_value):
-        with hide('stderr', 'running', 'warnings'):
-            subprocess.run("echo 'Some EMR cluster is still being created/terminated, waiting..'", shell=True, check=True)
+        subprocess.run("echo 'Some EMR cluster is still being created/terminated, waiting..'", shell=True, check=True)
         time.sleep(60)
         emr_waiter(tag_name, tag_value)
     else:
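
The dropped hide('stderr', 'running', 'warnings') context manager is Fabric
1.x API with no direct counterpart in Fabric 2/Invoke, so removing it is the
straightforward migration step. If the quieter output were still wanted, one
option (an alternative sketch, not what this commit does) is to capture the
subprocess output instead of letting it reach the terminal:

    import subprocess

    # Hypothetical: suppress the echo's output (capture_output needs Python 3.7+).
    subprocess.run("echo 'Some EMR cluster is still being created/terminated, waiting..'",
                   shell=True, check=True, capture_output=True)

A plain print() of the same message would achieve the same without shelling out.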
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
index 523e9a2..35e53a2 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
@@ -36,13 +36,13 @@ def enable_proxy(proxy_host, proxy_port):
     try:
         proxy_string = "http://%s:%s" % (proxy_host, proxy_port)
         proxy_https_string = "https://%s:%s" % (proxy_host, proxy_port)
-        conn.sudo('sed -i "/^export http_proxy/d" /etc/profile')
-        conn.sudo('sed -i "/^export https_proxy/d" /etc/profile')
-        conn.sudo('echo export http_proxy=' + proxy_string + ' >> /etc/profile')
-        conn.sudo('echo export https_proxy=' + proxy_string + ' >> /etc/profile')
-        if exists(conn, '/etc/yum.conf'):
-            conn.sudo('sed -i "/^proxy=/d" /etc/yum.conf')
-        conn.sudo("echo 'proxy={}' >> /etc/yum.conf".format(proxy_string))
+        datalab.fab.conn.sudo('sed -i "/^export http_proxy/d" /etc/profile')
+        datalab.fab.conn.sudo('sed -i "/^export https_proxy/d" /etc/profile')
+        datalab.fab.conn.sudo('bash -c "echo export http_proxy=' + proxy_string + ' >> /etc/profile"')
+        datalab.fab.conn.sudo('bash -c "echo export https_proxy=' + proxy_string + ' >> /etc/profile"')
+        if exists(datalab.fab.conn, '/etc/yum.conf'):
+            datalab.fab.conn.sudo('sed -i "/^proxy=/d" /etc/yum.conf')
+        datalab.fab.conn.sudo('''bash -c "echo 'proxy={}' >> /etc/yum.conf" '''.format(proxy_string))
         manage_pkg('clean all', 'remote', '')
     except:
         sys.exit(1)
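
The bash -c wrapping added in this hunk works around a classic sudo pitfall:
in a bare sudo('echo export http_proxy=... >> /etc/profile'), the >>
redirection is evaluated by the remote shell as the unprivileged SSH user, so
the write to /etc/profile fails even though echo itself runs as root. Wrapping
the whole command in bash -c moves the redirection inside the root-owned
shell (proxy address below is a placeholder):

    # Fails: >> is handled outside sudo, as the SSH user.
    # conn.sudo('echo export http_proxy=http://proxy.example:3128 >> /etc/profile')

    # Works: the redirection runs inside the root bash -c shell.
    conn.sudo('bash -c "echo export http_proxy=http://proxy.example:3128 >> /etc/profile"')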
@@ -50,7 +50,7 @@ def enable_proxy(proxy_host, proxy_port):
 
 def downgrade_python_version():
     try:
-       conn.sudo('python3 -c "import os,sys,yum; yb = yum.YumBase(); pl = yb.doPackageLists(); \
+       datalab.fab.conn.sudo('python3 -c "import os,sys,yum; yb = yum.YumBase(); pl = yb.doPackageLists(); \
         version = [pkg.vr for pkg in pl.installed if pkg.name == \'python\']; \
         os.system(\'yum -y downgrade python python-devel-2.7.5-58.el7.x86_64 python-libs-2.7.5-58.el7.x86_64\') \
         if version != [] and version[0] == \'2.7.5-68.el7\' else False"')
@@ -59,30 +59,30 @@ def downgrade_python_version():
 
 
 def ensure_r_local_kernel(spark_version, os_user, templates_dir, kernels_dir):
-    if not exists(conn,'/home/{}/.ensure_dir/r_kernel_ensured'.format(os_user)):
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/r_kernel_ensured'.format(os_user)):
         try:
-            conn.sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/.local')
-            conn.run('R -e "IRkernel::installspec()"')
-            conn.sudo('ln -s /opt/spark/ /usr/local/spark')
+            datalab.fab.conn.sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/.local')
+            datalab.fab.conn.run('R -e "IRkernel::installspec()"')
+            datalab.fab.conn.sudo('ln -s /opt/spark/ /usr/local/spark')
             try:
-                conn.sudo('''bash -c 'cd /usr/local/spark/R/lib/SparkR; R -e "install.packages(\'roxygen2\',repos=\'https://cloud.r-project.org\')" R -e "devtools::check(\'.\')"' ''')
+                datalab.fab.conn.sudo('''bash -c 'cd /usr/local/spark/R/lib/SparkR; R -e "install.packages(\'roxygen2\',repos=\'https://cloud.r-project.org\')" R -e "devtools::check(\'.\')"' ''')
             except:
                 pass
-            conn.sudo('''bash -c 'cd /usr/local/spark/R/lib/SparkR; R -e "devtools::install(\'.\')"' ''')
-            r_version = conn.sudo("R --version | awk '/version / {print $3}'").stdout.replace('\n','')
-            conn.put(templates_dir + 'r_template.json', '/tmp/r_template.json')
-            conn.sudo('sed -i "s|R_VER|' + r_version + '|g" /tmp/r_template.json')
-            conn.sudo('sed -i "s|SP_VER|' + spark_version + '|g" /tmp/r_template.json')
-            conn.sudo('\cp -f /tmp/r_template.json {}/ir/kernel.json'.format(kernels_dir))
-            conn.sudo('ln -s /usr/lib64/R/ /usr/lib/R')
-            conn.sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/.local')
-            conn.sudo('touch /home/{}/.ensure_dir/r_kernel_ensured'.format(os_user))
+            datalab.fab.conn.sudo('''bash -c 'cd /usr/local/spark/R/lib/SparkR; R -e "devtools::install(\'.\')"' ''')
+            r_version = datalab.fab.conn.sudo("R --version | awk '/version / {print $3}'").stdout.replace('\n','')
+            datalab.fab.conn.put(templates_dir + 'r_template.json', '/tmp/r_template.json')
+            datalab.fab.conn.sudo('sed -i "s|R_VER|' + r_version + '|g" /tmp/r_template.json')
+            datalab.fab.conn.sudo('sed -i "s|SP_VER|' + spark_version + '|g" /tmp/r_template.json')
+            datalab.fab.conn.sudo('\cp -f /tmp/r_template.json {}/ir/kernel.json'.format(kernels_dir))
+            datalab.fab.conn.sudo('ln -s /usr/lib64/R/ /usr/lib/R')
+            datalab.fab.conn.sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/.local')
+            datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/r_kernel_ensured'.format(os_user))
         except:
             sys.exit(1)
 
 
 def ensure_r(os_user, r_libs, region, r_mirror):
-    if not exists(conn,'/home/{}/.ensure_dir/r_ensured'.format(os_user)):
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/r_ensured'.format(os_user)):
         try:
             if region == 'cn-north-1':
                 r_repository = r_mirror
@@ -90,140 +90,140 @@ def ensure_r(os_user, r_libs, region, r_mirror):
                 r_repository = 'https://cloud.r-project.org'
             manage_pkg('-y install', 'remote', 'cmake')
             manage_pkg('-y install', 'remote', 'libcur*')
-            conn.sudo('echo -e "[base]\nname=CentOS-7-Base\nbaseurl=http://buildlogs.centos.org/centos/7/os/x86_64-20140704-1/\ngpgcheck=1\ngpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7\npriority=1\nexclude=php mysql" >> /etc/yum.repos.d/CentOS-base.repo')
+            datalab.fab.conn.sudo('echo -e "[base]\nname=CentOS-7-Base\nbaseurl=http://buildlogs.centos.org/centos/7/os/x86_64-20140704-1/\ngpgcheck=1\ngpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7\npriority=1\nexclude=php mysql" >> /etc/yum.repos.d/CentOS-base.repo')
             manage_pkg('-y install', 'remote', 'R R-core R-core-devel R-devel --nogpgcheck')
-            conn.sudo('R CMD javareconf')
-            conn.sudo('''bash -c 'cd /root; git clone https://github.com/zeromq/zeromq4-x.git; cd zeromq4-x/; mkdir build; cd build; cmake ..; make install; ldconfig' ''')
+            datalab.fab.conn.sudo('R CMD javareconf')
+            datalab.fab.conn.sudo('''bash -c 'cd /root; git clone https://github.com/zeromq/zeromq4-x.git; cd zeromq4-x/; mkdir build; cd build; cmake ..; make install; ldconfig' ''')
             for i in r_libs:
-                conn.sudo('R -e "install.packages(\'{}\',repos=\'{}\')"'.format(i, r_repository))
-            conn.sudo('R -e "library(\'devtools\');install.packages(repos=\'{}\',c(\'rzmq\',\'repr\',\'digest\',\'stringr\',\'RJSONIO\',\'functional\',\'plyr\'))"'.format(r_repository))
-            conn.sudo('R -e "library(\'devtools\');install_github(\'IRkernel/repr\');install_github(\'IRkernel/IRdisplay\');install_github(\'IRkernel/IRkernel\');"')
-            conn.sudo('R -e "library(\'devtools\');install_version(\'keras\', version = \'{}\', repos = \'{}\');"'.format(os.environ['notebook_keras_version'],r_repository))
-            conn.sudo('R -e "install.packages(\'RJDBC\',repos=\'{}\',dep=TRUE)"'.format(r_repository))
-            conn.sudo('touch /home/{}/.ensure_dir/r_ensured'.format(os_user))
+                datalab.fab.conn.sudo('R -e "install.packages(\'{}\',repos=\'{}\')"'.format(i, r_repository))
+            datalab.fab.conn.sudo('R -e "library(\'devtools\');install.packages(repos=\'{}\',c(\'rzmq\',\'repr\',\'digest\',\'stringr\',\'RJSONIO\',\'functional\',\'plyr\'))"'.format(r_repository))
+            datalab.fab.conn.sudo('R -e "library(\'devtools\');install_github(\'IRkernel/repr\');install_github(\'IRkernel/IRdisplay\');install_github(\'IRkernel/IRkernel\');"')
+            datalab.fab.conn.sudo('R -e "library(\'devtools\');install_version(\'keras\', version = \'{}\', repos = \'{}\');"'.format(os.environ['notebook_keras_version'],r_repository))
+            datalab.fab.conn.sudo('R -e "install.packages(\'RJDBC\',repos=\'{}\',dep=TRUE)"'.format(r_repository))
+            datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/r_ensured'.format(os_user))
         except:
             sys.exit(1)
 
 
 def install_rstudio(os_user, local_spark_path, rstudio_pass, rstudio_version):
-    if not exists(conn,'/home/' + os_user + '/.ensure_dir/rstudio_ensured'):
+    if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/rstudio_ensured'):
         try:
             manage_pkg('-y install --nogpgcheck', 'remote', 'https://download2.rstudio.org/server/centos6/x86_64/rstudio-server-rhel-{}-x86_64.rpm'.format(rstudio_version))
-            conn.sudo('mkdir -p /mnt/var')
-            conn.sudo('chown {0}:{0} /mnt/var'.format(os_user))
-            conn.sudo("sed -i '/Type=forking/a \Environment=USER=datalab-user' /lib/systemd/system/rstudio-server.service")
-            conn.sudo(
+            datalab.fab.conn.sudo('mkdir -p /mnt/var')
+            datalab.fab.conn.sudo('chown {0}:{0} /mnt/var'.format(os_user))
+            datalab.fab.conn.sudo("sed -i '/Type=forking/a \Environment=USER=datalab-user' /lib/systemd/system/rstudio-server.service")
+            datalab.fab.conn.sudo(
                 "sed -i '/ExecStart/s|=/usr/lib/rstudio-server/bin/rserver|=/bin/bash -c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; /usr/lib/rstudio-server/bin/rserver --auth-none 1|g' /lib/systemd/system/rstudio-server.service")
-            conn.sudo("sed -i '/ExecStart/s|$|\"|g' /lib/systemd/system/rstudio-server.service")
-            conn.sudo("systemctl daemon-reload")
-            conn.sudo('touch /home/{}/.Renviron'.format(os_user))
-            conn.sudo('chown {0}:{0} /home/{0}/.Renviron'.format(os_user))
-            conn.sudo('''echo 'SPARK_HOME="{0}"' >> /home/{1}/.Renviron'''.format(local_spark_path, os_user))
-            conn.sudo('touch /home/{}/.Rprofile'.format(os_user))
-            conn.sudo('chown {0}:{0} /home/{0}/.Rprofile'.format(os_user))
-            conn.sudo('''echo 'library(SparkR, lib.loc = c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib")))' >> /home/{}/.Rprofile'''.format(os_user))
-            http_proxy = conn.run('''bash -l -c 'echo $http_proxy' ''').stdout.replace('\n','')
-            https_proxy = conn.run('''bash -l -c 'echo $https_proxy' ''').stdout.replace('\n','')
-            conn.sudo('''echo 'Sys.setenv(http_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(http_proxy, os_user))
-            conn.sudo('''echo 'Sys.setenv(https_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(https_proxy, os_user))
-            conn.sudo('rstudio-server start')
-            conn.sudo('''bash -c 'echo "{0}:{1}" | chpasswd' '''.format(os_user, rstudio_pass))
-            conn.sudo("sed -i '/exit 0/d' /etc/rc.local")
-            conn.sudo('''bash -c "echo \'sed -i 's/^#SPARK_HOME/SPARK_HOME/' /home/{}/.Renviron\' >> /etc/rc.local"'''.format(os_user))
-            conn.sudo("bash -c 'echo exit 0 >> /etc/rc.local'")
-            conn.sudo('touch /home/{}/.ensure_dir/rstudio_ensured'.format(os_user))
+            datalab.fab.conn.sudo("sed -i '/ExecStart/s|$|\"|g' /lib/systemd/system/rstudio-server.service")
+            datalab.fab.conn.sudo("systemctl daemon-reload")
+            datalab.fab.conn.sudo('touch /home/{}/.Renviron'.format(os_user))
+            datalab.fab.conn.sudo('chown {0}:{0} /home/{0}/.Renviron'.format(os_user))
+            datalab.fab.conn.sudo('''echo 'SPARK_HOME="{0}"' >> /home/{1}/.Renviron'''.format(local_spark_path, os_user))
+            datalab.fab.conn.sudo('touch /home/{}/.Rprofile'.format(os_user))
+            datalab.fab.conn.sudo('chown {0}:{0} /home/{0}/.Rprofile'.format(os_user))
+            datalab.fab.conn.sudo('''echo 'library(SparkR, lib.loc = c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib")))' >> /home/{}/.Rprofile'''.format(os_user))
+            http_proxy = datalab.fab.conn.run('''bash -l -c 'echo $http_proxy' ''').stdout.replace('\n','')
+            https_proxy = datalab.fab.conn.run('''bash -l -c 'echo $https_proxy' ''').stdout.replace('\n','')
+            datalab.fab.conn.sudo('''echo 'Sys.setenv(http_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(http_proxy, os_user))
+            datalab.fab.conn.sudo('''echo 'Sys.setenv(https_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(https_proxy, os_user))
+            datalab.fab.conn.sudo('rstudio-server start')
+            datalab.fab.conn.sudo('''bash -c 'echo "{0}:{1}" | chpasswd' '''.format(os_user, rstudio_pass))
+            datalab.fab.conn.sudo("sed -i '/exit 0/d' /etc/rc.local")
+            datalab.fab.conn.sudo('''bash -c "echo \'sed -i 's/^#SPARK_HOME/SPARK_HOME/' /home/{}/.Renviron\' >> /etc/rc.local"'''.format(os_user))
+            datalab.fab.conn.sudo("bash -c 'echo exit 0 >> /etc/rc.local'")
+            datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/rstudio_ensured'.format(os_user))
         except:
             sys.exit(1)
     else:
         try:
-            conn.sudo('''bash -c 'echo "{0}:{1}" | chpasswd' '''.format(os_user, rstudio_pass))
+            datalab.fab.conn.sudo('''bash -c 'echo "{0}:{1}" | chpasswd' '''.format(os_user, rstudio_pass))
         except:
             sys.exit(1)
 
 
 def ensure_matplot(os_user):
-    if not exists(conn,'/home/{}/.ensure_dir/matplot_ensured'.format(os_user)):
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/matplot_ensured'.format(os_user)):
         try:
-            conn.sudo('python3.5 -m pip install matplotlib=={} --no-cache-dir'.format(os.environ['notebook_matplotlib_version']))
+            datalab.fab.conn.sudo('python3.5 -m pip install matplotlib=={} --no-cache-dir'.format(os.environ['notebook_matplotlib_version']))
             if os.environ['application'] in ('tensor', 'deeplearning'):
-                conn.sudo('python3.8 -m pip install -U numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
-            conn.sudo('touch /home/{}/.ensure_dir/matplot_ensured'.format(os_user))
+                datalab.fab.conn.sudo('python3.8 -m pip install -U numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+            datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/matplot_ensured'.format(os_user))
         except:
             sys.exit(1)
 
 
 def ensure_sbt(os_user):
-    if not exists(conn,'/home/{}/.ensure_dir/sbt_ensured'.format(os_user)):
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/sbt_ensured'.format(os_user)):
         try:
-            conn.sudo('curl https://bintray.com/sbt/rpm/rpm | sudo tee /etc/yum.repos.d/bintray-sbt-rpm.repo')
+            datalab.fab.conn.sudo('curl https://bintray.com/sbt/rpm/rpm | sudo tee /etc/yum.repos.d/bintray-sbt-rpm.repo')
             manage_pkg('-y install', 'remote', 'sbt')
-            conn.sudo('touch /home/{}/.ensure_dir/sbt_ensured'.format(os_user))
+            datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/sbt_ensured'.format(os_user))
         except:
             sys.exit(1)
 
 
 def ensure_jre_jdk(os_user):
-    if not exists(conn,'/home/' + os_user + '/.ensure_dir/jre_jdk_ensured'):
+    if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/jre_jdk_ensured'):
         try:
             manage_pkg('-y install', 'remote', 'java-1.8.0-openjdk')
             manage_pkg('-y install', 'remote', 'java-1.8.0-openjdk-devel')
-            conn.sudo('touch /home/' + os_user + '/.ensure_dir/jre_jdk_ensured')
+            datalab.fab.conn.sudo('touch /home/' + os_user + '/.ensure_dir/jre_jdk_ensured')
         except:
             sys.exit(1)
 
 
 def ensure_scala(scala_link, scala_version, os_user):
-    if not exists(conn,'/home/' + os_user + '/.ensure_dir/scala_ensured'):
+    if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/scala_ensured'):
         try:
-            conn.sudo('wget {}scala-{}.rpm -O /tmp/scala.rpm'.format(scala_link, scala_version))
-            conn.sudo('rpm -i /tmp/scala.rpm')
-            conn.sudo('touch /home/' + os_user + '/.ensure_dir/scala_ensured')
+            datalab.fab.conn.sudo('wget {}scala-{}.rpm -O /tmp/scala.rpm'.format(scala_link, scala_version))
+            datalab.fab.conn.sudo('rpm -i /tmp/scala.rpm')
+            datalab.fab.conn.sudo('touch /home/' + os_user + '/.ensure_dir/scala_ensured')
         except:
             sys.exit(1)
 
 
 def ensure_additional_python_libs(os_user):
-    if not exists(conn,'/home/' + os_user + '/.ensure_dir/additional_python_libs_ensured'):
+    if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/additional_python_libs_ensured'):
         try:
             manage_pkg('clean', 'remote', 'all')
             manage_pkg('-y install', 'remote', 'zlib-devel libjpeg-turbo-devel --nogpgcheck')
             if os.environ['application'] in ('jupyter', 'zeppelin'):
-                conn.sudo('python3.5 -m pip install NumPy=={} SciPy pandas Sympy Pillow sklearn --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+                datalab.fab.conn.sudo('python3.5 -m pip install NumPy=={} SciPy pandas Sympy Pillow sklearn --no-cache-dir'.format(os.environ['notebook_numpy_version']))
             if os.environ['application'] in ('tensor', 'deeplearning'):
-                conn.sudo('python3.8 -m pip install opencv-python h5py --no-cache-dir')
-            conn.sudo('touch /home/' + os_user + '/.ensure_dir/additional_python_libs_ensured')
+                datalab.fab.conn.sudo('python3.8 -m pip install opencv-python h5py --no-cache-dir')
+            datalab.fab.conn.sudo('touch /home/' + os_user + '/.ensure_dir/additional_python_libs_ensured')
         except:
             sys.exit(1)
 
 
 def ensure_python3_specific_version(python3_version, os_user):
-    if not exists(conn,'/home/' + os_user + '/.ensure_dir/python3_specific_version_ensured'):
+    if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/python3_specific_version_ensured'):
         try:
             manage_pkg('-y install', 'remote', 'yum-utils python34 openssl-devel')
             manage_pkg('-y groupinstall', 'remote', 'development --nogpgcheck')
             if len(python3_version) < 4:
                 python3_version = python3_version + ".0"
-            conn.sudo('wget https://www.python.org/ftp/python/{0}/Python-{0}.tgz'.format(python3_version))
-            conn.sudo('tar xzf Python-{0}.tgz; cd Python-{0}; ./configure --prefix=/usr/local; make altinstall'.format(python3_version))
-            conn.sudo('touch /home/' + os_user + '/.ensure_dir/python3_specific_version_ensured')
+            datalab.fab.conn.sudo('wget https://www.python.org/ftp/python/{0}/Python-{0}.tgz'.format(python3_version))
+            datalab.fab.conn.sudo('tar xzf Python-{0}.tgz; cd Python-{0}; ./configure --prefix=/usr/local; make altinstall'.format(python3_version))
+            datalab.fab.conn.sudo('touch /home/' + os_user + '/.ensure_dir/python3_specific_version_ensured')
         except:
             sys.exit(1)
 
 def ensure_python3_libraries(os_user):
-    if not exists(conn,'/home/' + os_user + '/.ensure_dir/python3_libraries_ensured'):
+    if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/python3_libraries_ensured'):
         try:
             manage_pkg('-y install', 'remote', 'https://centos7.iuscommunity.org/ius-release.rpm')
             manage_pkg('-y install', 'remote', 'python35u python35u-pip python35u-devel')
-            conn.sudo('python3.5 -m pip install -U pip=={} setuptools --no-cache-dir'.format(os.environ['conf_pip_version']))
-            conn.sudo('python3.5 -m pip install boto3 --no-cache-dir')
-            conn.sudo('python3.5 -m pip install fabvenv fabric-virtualenv future --no-cache-dir')
+            datalab.fab.conn.sudo('python3.5 -m pip install -U pip=={} setuptools --no-cache-dir'.format(os.environ['conf_pip_version']))
+            datalab.fab.conn.sudo('python3.5 -m pip install boto3 --no-cache-dir')
+            datalab.fab.conn.sudo('python3.5 -m pip install fabvenv fabric-virtualenv future --no-cache-dir')
             try:
-                conn.sudo('python3.5 -m pip install tornado=={0} ipython==7.9.0 ipykernel=={1} --no-cache-dir' \
+                datalab.fab.conn.sudo('python3.5 -m pip install tornado=={0} ipython==7.9.0 ipykernel=={1} --no-cache-dir' \
                      .format(os.environ['notebook_tornado_version'], os.environ['notebook_ipykernel_version']))
             except:
-                conn.sudo('python3.5 -m pip install tornado=={0} ipython==5.0.0 ipykernel=={1} --no-cache-dir' \
+                datalab.fab.conn.sudo('python3.5 -m pip install tornado=={0} ipython==5.0.0 ipykernel=={1} --no-cache-dir' \
                      .format(os.environ['notebook_tornado_version'], os.environ['notebook_ipykernel_version']))
-            conn.sudo('touch /home/' + os_user + '/.ensure_dir/python3_libraries_ensured')
+            datalab.fab.conn.sudo('touch /home/' + os_user + '/.ensure_dir/python3_libraries_ensured')
         except:
             sys.exit(1)
 
@@ -231,64 +231,64 @@ def ensure_python3_libraries(os_user):
 def install_tensor(os_user, cuda_version, cuda_file_name,
                    cudnn_version, cudnn_file_name, tensorflow_version,
                    templates_dir, nvidia_version):
-    if not exists(conn,'/home/{}/.ensure_dir/tensor_ensured'.format(os_user)):
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/tensor_ensured'.format(os_user)):
         try:
             # install nvidia drivers
-            conn.sudo('''bash -c 'echo "blacklist nouveau" >> /etc/modprobe.d/blacklist-nouveau.conf' ''')
-            conn.sudo('''bash -c 'echo "options nouveau modeset=0" >> /etc/modprobe.d/blacklist-nouveau.conf' ''')
-            conn.sudo('dracut --force')
-            conn.sudo('reboot', warn=True)
+            datalab.fab.conn.sudo('''bash -c 'echo "blacklist nouveau" >> /etc/modprobe.d/blacklist-nouveau.conf' ''')
+            datalab.fab.conn.sudo('''bash -c 'echo "options nouveau modeset=0" >> /etc/modprobe.d/blacklist-nouveau.conf' ''')
+            datalab.fab.conn.sudo('dracut --force')
+            datalab.fab.conn.sudo('reboot', warn=True)
             time.sleep(150)
             manage_pkg('-y install', 'remote', 'libglvnd-opengl libglvnd-devel dkms gcc kernel-devel-$(uname -r) kernel-headers-$(uname -r)')
-            conn.sudo('wget http://us.download.nvidia.com/XFree86/Linux-x86_64/{0}/NVIDIA-Linux-x86_64-{0}.run -O /home/{1}/NVIDIA-Linux-x86_64-{0}.run'.format(nvidia_version, os_user))
-            conn.sudo('/bin/bash /home/{0}/NVIDIA-Linux-x86_64-{1}.run -s --dkms'.format(os_user, nvidia_version))
-            conn.sudo('rm -f /home/{0}/NVIDIA-Linux-x86_64-{1}.run'.format(os_user, nvidia_version))
+            datalab.fab.conn.sudo('wget http://us.download.nvidia.com/XFree86/Linux-x86_64/{0}/NVIDIA-Linux-x86_64-{0}.run -O /home/{1}/NVIDIA-Linux-x86_64-{0}.run'.format(nvidia_version, os_user))
+            datalab.fab.conn.sudo('/bin/bash /home/{0}/NVIDIA-Linux-x86_64-{1}.run -s --dkms'.format(os_user, nvidia_version))
+            datalab.fab.conn.sudo('rm -f /home/{0}/NVIDIA-Linux-x86_64-{1}.run'.format(os_user, nvidia_version))
             # install cuda
-            conn.sudo('python3.5 -m pip install --upgrade pip=={0} wheel numpy=={1} --no-cache-dir'. format(os.environ['conf_pip_version'], os.environ['notebook_numpy_version']))
-            conn.sudo('wget -P /opt https://developer.nvidia.com/compute/cuda/{0}/prod/local_installers/{1}'.format(cuda_version, cuda_file_name))
-            conn.sudo('sh /opt/{} --silent --toolkit'.format(cuda_file_name))
-            conn.sudo('mv /usr/local/cuda-{} /opt/'.format(cuda_version[:-2]))
-            conn.sudo('ln -s /opt/cuda-{0} /usr/local/cuda-{0}'.format(cuda_version[:-2]))
-            conn.sudo('rm -f /opt/{}'.format(cuda_file_name))
+            datalab.fab.conn.sudo('python3.5 -m pip install --upgrade pip=={0} wheel numpy=={1} --no-cache-dir'. format(os.environ['conf_pip_version'], os.environ['notebook_numpy_version']))
+            datalab.fab.conn.sudo('wget -P /opt https://developer.nvidia.com/compute/cuda/{0}/prod/local_installers/{1}'.format(cuda_version, cuda_file_name))
+            datalab.fab.conn.sudo('sh /opt/{} --silent --toolkit'.format(cuda_file_name))
+            datalab.fab.conn.sudo('mv /usr/local/cuda-{} /opt/'.format(cuda_version[:-2]))
+            datalab.fab.conn.sudo('ln -s /opt/cuda-{0} /usr/local/cuda-{0}'.format(cuda_version[:-2]))
+            datalab.fab.conn.sudo('rm -f /opt/{}'.format(cuda_file_name))
             # install cuDNN
-            conn.run('wget http://developer.download.nvidia.com/compute/redist/cudnn/v{0}/{1} -O /tmp/{1}'.format(cudnn_version, cudnn_file_name))
-            conn.run('tar xvzf /tmp/{} -C /tmp'.format(cudnn_file_name))
-            conn.sudo('mkdir -p /opt/cudnn/include')
-            conn.sudo('mkdir -p /opt/cudnn/lib64')
-            conn.sudo('mv /tmp/cuda/include/cudnn.h /opt/cudnn/include')
-            conn.sudo('mv /tmp/cuda/lib64/libcudnn* /opt/cudnn/lib64')
-            conn.sudo('chmod a+r /opt/cudnn/include/cudnn.h /opt/cudnn/lib64/libcudnn*')
-            conn.run('''bash -l -c 'echo "export LD_LIBRARY_PATH=\"$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64\"" >> ~/.bashrc' ''')
+            datalab.fab.conn.run('wget http://developer.download.nvidia.com/compute/redist/cudnn/v{0}/{1} -O /tmp/{1}'.format(cudnn_version, cudnn_file_name))
+            datalab.fab.conn.run('tar xvzf /tmp/{} -C /tmp'.format(cudnn_file_name))
+            datalab.fab.conn.sudo('mkdir -p /opt/cudnn/include')
+            datalab.fab.conn.sudo('mkdir -p /opt/cudnn/lib64')
+            datalab.fab.conn.sudo('mv /tmp/cuda/include/cudnn.h /opt/cudnn/include')
+            datalab.fab.conn.sudo('mv /tmp/cuda/lib64/libcudnn* /opt/cudnn/lib64')
+            datalab.fab.conn.sudo('chmod a+r /opt/cudnn/include/cudnn.h /opt/cudnn/lib64/libcudnn*')
+            datalab.fab.conn.run('''bash -l -c 'echo "export LD_LIBRARY_PATH=\"$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64\"" >> ~/.bashrc' ''')
             # install TensorFlow and run TensorBoard
-            conn.sudo('wget https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-{}-cp27-none-linux_x86_64.whl'.format(tensorflow_version))
-            conn.sudo('wget https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-{}-cp35-cp35m-linux_x86_64.whl'.format(tensorflow_version))
-            conn.sudo('python3.8 -m pip install --upgrade tensorflow_gpu-{}-cp35-cp35m-linux_x86_64.whl --no-cache-dir'.format(tensorflow_version))
-            conn.sudo('rm -rf /home/{}/tensorflow_gpu-*'.format(os_user))
-            conn.sudo('mkdir /var/log/tensorboard; chown {0}:{0} -R /var/log/tensorboard'.format(os_user))
-            conn.put('{}tensorboard.service'.format(templates_dir), '/tmp/tensorboard.service')
-            conn.sudo("sed -i 's|OS_USR|{}|' /tmp/tensorboard.service".format(os_user))
-            conn.sudo("chmod 644 /tmp/tensorboard.service")
-            conn.sudo('\cp /tmp/tensorboard.service /etc/systemd/system/')
-            conn.sudo("systemctl daemon-reload")
-            conn.sudo("systemctl enable tensorboard")
-            conn.sudo("systemctl start tensorboard")
-            conn.sudo('touch /home/{}/.ensure_dir/tensor_ensured'.format(os_user))
+            datalab.fab.conn.sudo('wget https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-{}-cp27-none-linux_x86_64.whl'.format(tensorflow_version))
+            datalab.fab.conn.sudo('wget https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-{}-cp35-cp35m-linux_x86_64.whl'.format(tensorflow_version))
+            datalab.fab.conn.sudo('python3.8 -m pip install --upgrade tensorflow_gpu-{}-cp35-cp35m-linux_x86_64.whl --no-cache-dir'.format(tensorflow_version))
+            datalab.fab.conn.sudo('rm -rf /home/{}/tensorflow_gpu-*'.format(os_user))
+            datalab.fab.conn.sudo('mkdir /var/log/tensorboard; chown {0}:{0} -R /var/log/tensorboard'.format(os_user))
+            datalab.fab.conn.put('{}tensorboard.service'.format(templates_dir), '/tmp/tensorboard.service')
+            datalab.fab.conn.sudo("sed -i 's|OS_USR|{}|' /tmp/tensorboard.service".format(os_user))
+            datalab.fab.conn.sudo("chmod 644 /tmp/tensorboard.service")
+            datalab.fab.conn.sudo('\cp /tmp/tensorboard.service /etc/systemd/system/')
+            datalab.fab.conn.sudo("systemctl daemon-reload")
+            datalab.fab.conn.sudo("systemctl enable tensorboard")
+            datalab.fab.conn.sudo("systemctl start tensorboard")
+            datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/tensor_ensured'.format(os_user))
         except:
             sys.exit(1)
 
 
 def install_maven(os_user):
-    if not exists(conn,'/home/' + os_user + '/.ensure_dir/maven_ensured'):
-        conn.sudo('wget http://apache.volia.net/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz -O /tmp/maven.tar.gz')
-        conn.sudo('tar -zxvf /tmp/maven.tar.gz -C /opt/')
-        conn.sudo('ln -fs /opt/apache-maven-3.3.9/bin/mvn /usr/bin/mvn')
-        conn.sudo('touch /home/' + os_user + '/.ensure_dir/maven_ensured')
+    if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/maven_ensured'):
+        datalab.fab.conn.sudo('wget http://apache.volia.net/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz -O /tmp/maven.tar.gz')
+        datalab.fab.conn.sudo('tar -zxvf /tmp/maven.tar.gz -C /opt/')
+        datalab.fab.conn.sudo('ln -fs /opt/apache-maven-3.3.9/bin/mvn /usr/bin/mvn')
+        datalab.fab.conn.sudo('touch /home/' + os_user + '/.ensure_dir/maven_ensured')
 
 
 def install_livy_dependencies(os_user):
-    if not exists(conn,'/home/' + os_user + '/.ensure_dir/livy_dependencies_ensured'):
-        conn.sudo('pip3.5 install cloudpickle requests requests-kerberos flake8 flaky pytest --no-cache-dir')
-        conn.sudo('touch /home/' + os_user + '/.ensure_dir/livy_dependencies_ensured')
+    if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/livy_dependencies_ensured'):
+        datalab.fab.conn.sudo('pip3.5 install cloudpickle requests requests-kerberos flake8 flaky pytest --no-cache-dir')
+        datalab.fab.conn.sudo('touch /home/' + os_user + '/.ensure_dir/livy_dependencies_ensured')
 
 
 def install_maven_emr(os_user):
@@ -306,10 +306,10 @@ def install_livy_dependencies_emr(os_user):
 
 
 def install_nodejs(os_user):
-    if not exists(conn,'/home/{}/.ensure_dir/nodejs_ensured'.format(os_user)):
-        conn.sudo('curl -sL https://rpm.nodesource.com/setup_6.x | sudo -E bash -')
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/nodejs_ensured'.format(os_user)):
+        datalab.fab.conn.sudo('curl -sL https://rpm.nodesource.com/setup_6.x | sudo -E bash -')
         manage_pkg('-y install', 'remote', 'nodejs')
-        conn.sudo('touch /home/{}/.ensure_dir/nodejs_ensured'.format(os_user))
+        datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/nodejs_ensured'.format(os_user))
 
 
 def install_os_pkg(requisites):
@@ -319,7 +319,7 @@ def install_os_pkg(requisites):
     try:
         print("Updating repositories and installing requested tools: {}".format(requisites))
         manage_pkg('update-minimal --security -y --skip-broken', 'remote', '')
-        conn.sudo('export LC_ALL=C')
+        datalab.fab.conn.sudo('export LC_ALL=C')
         for os_pkg in requisites:
             name, vers = os_pkg
             if vers != '' and vers !='N/A':
@@ -330,22 +330,22 @@ def install_os_pkg(requisites):
                 os_pkg = name
             manage_pkg('-y install', 'remote', '{0} --nogpgcheck 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E  "({1})" '
                                                '/tmp/tee.tmp >  /tmp/os_install_{2}.log; then  echo "" > /tmp/os_install_{2}.log;fi'.format(os_pkg, error_parser, name))
-            install_output = conn.sudo('cat /tmp/tee.tmp').stdout
-            err = conn.sudo('cat /tmp/os_install_{}.log'.format(name)).stdout.replace('"', "'")
-            conn.sudo('cat /tmp/tee.tmp | if ! grep -w -E -A 30 "({1})" /tmp/tee.tmp > '
+            install_output = datalab.fab.conn.sudo('cat /tmp/tee.tmp').stdout
+            err = datalab.fab.conn.sudo('cat /tmp/os_install_{}.log'.format(name)).stdout.replace('"', "'")
+            datalab.fab.conn.sudo('cat /tmp/tee.tmp | if ! grep -w -E -A 30 "({1})" /tmp/tee.tmp > '
                  '/tmp/os_install_{0}.log; then echo "" > /tmp/os_install_{0}.log;fi'.format(name, new_pkgs_parser))
-            dep = conn.sudo('cat /tmp/os_install_{}.log'.format(name)).stdout
+            dep = datalab.fab.conn.sudo('cat /tmp/os_install_{}.log'.format(name)).stdout
             if dep == '':
                 dep = []
             else:
                 dep = dep[len(new_pkgs_parser): dep.find("Complete!") - 1].replace('  ', '').strip().split('\r\n')
                 for n, i in enumerate(dep):
                     i = i.split('.')[0]
-                    conn.sudo('yum info {0} 2>&1 | if ! grep Version > /tmp/os_install_{0}.log; then echo "" > /tmp/os_install_{0}.log;fi'.format(i))
+                    datalab.fab.conn.sudo('yum info {0} 2>&1 | if ! grep Version > /tmp/os_install_{0}.log; then echo "" > /tmp/os_install_{0}.log;fi'.format(i))
                     dep[n] = datalab.fab.conn.sudo('cat /tmp/os_install_{}.log'.format(i)).stdout.replace('Version     : ', '{} v.'.format(i))
                 dep = [i for i in dep if i]
             versions = []
-            res = conn.sudo(
+            res = datalab.fab.conn.sudo(
                 'python3 -c "import os,sys,yum; yb = yum.YumBase(); pl = yb.doPackageLists(); print [pkg.vr for pkg in pl.installed if pkg.name == \'{0}\']"'.format(
                     name)).stdout.split('\r\n')[1]
             if err:
@@ -354,7 +354,7 @@ def install_os_pkg(requisites):
                 version = res.split("'")[1].split("-")[0]
                 status_msg = "installed"
             if 'No package {} available'.format(os_pkg) in install_output:
-                versions = conn.sudo('yum --showduplicates list ' + name + ' | expand | grep ' + name + ' | awk \'{print $2}\'').stdout.replace('\r\n', '')
+                versions = datalab.fab.conn.sudo('yum --showduplicates list ' + name + ' | expand | grep ' + name + ' | awk \'{print $2}\'').stdout.replace('\r\n', '')
                 if versions and versions != 'Error: No matching Packages to list':
                     versions = versions.split(' ')
                     status_msg = 'invalid_version'
@@ -388,7 +388,7 @@ def get_available_os_pkgs():
     try:
         manage_pkg('update-minimal --security -y --skip-broken', 'remote', '')
         downgrade_python_version()
-        yum_raw = conn.sudo('python3 -c "import os,sys,yum; yb = yum.YumBase(); pl = yb.doPackageLists(); '
+        yum_raw = datalab.fab.conn.sudo('python3 -c "import os,sys,yum; yb = yum.YumBase(); pl = yb.doPackageLists(); '
                             'print {pkg.name:pkg.vr for pkg in pl.available}"').stdout
         yum_re = re.sub\
             (r'\w*\s\w*\D\s\w*.\w*.\s\w*.\w*.\w.\w*.\w*.\w*', '', yum_raw)
@@ -401,67 +401,67 @@ def get_available_os_pkgs():
 
 
 def install_opencv(os_user):
-    if not exists(conn,'/home/{}/.ensure_dir/opencv_ensured'.format(os_user)):
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/opencv_ensured'.format(os_user)):
         manage_pkg('-y install', 'remote', 'cmake python34 python34-devel python34-pip gcc gcc-c++')
-        conn.sudo('pip3.4 install numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
-        conn.sudo('pip3.5 install numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
-        conn.run('git clone https://github.com/opencv/opencv.git')
-        conn.run('cd /home/{}/opencv/ && git checkout 3.2.0'.format(os_user))
-        conn.run('cd /home/{}/opencv/ && mkdir release'.format(os_user))
-        conn.run('cd /home/{}/opencv/release/ && cmake -DINSTALL_TESTS=OFF -D CUDA_GENERATION=Auto -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=$(python2 -c "import sys; print(sys.prefix)") -D PYTHON_EXECUTABLE=$(which python2) ..')
-        conn.run('cd /home/{}/opencv/release/ && make -j$(nproc)')
-        conn.sudo('''bash -c 'cd /home/{}/opencv/release/ &&  make install' ''')
-        conn.sudo('touch /home/' + os_user + '/.ensure_dir/opencv_ensured')
+        datalab.fab.conn.sudo('pip3.4 install numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+        datalab.fab.conn.sudo('pip3.5 install numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+        datalab.fab.conn.run('git clone https://github.com/opencv/opencv.git')
+        datalab.fab.conn.run('cd /home/{}/opencv/ && git checkout 3.2.0'.format(os_user))
+        datalab.fab.conn.run('cd /home/{}/opencv/ && mkdir release'.format(os_user))
+        datalab.fab.conn.run('cd /home/{}/opencv/release/ && cmake -DINSTALL_TESTS=OFF -D CUDA_GENERATION=Auto -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=$(python2 -c "import sys; print(sys.prefix)") -D PYTHON_EXECUTABLE=$(which python2) ..')
+        datalab.fab.conn.run('cd /home/{}/opencv/release/ && make -j$(nproc)'.format(os_user))
+        datalab.fab.conn.sudo('''bash -c 'cd /home/{}/opencv/release/ && make install' '''.format(os_user))
+        datalab.fab.conn.sudo('touch /home/' + os_user + '/.ensure_dir/opencv_ensured')
 
 
 def install_caffe2(os_user, caffe2_version, cmake_version):
-    if not exists(conn,'/home/{}/.ensure_dir/caffe2_ensured'.format(os_user)):
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/caffe2_ensured'.format(os_user)):
         env.shell = "/bin/bash -l -c -i"
         manage_pkg('update-minimal --security -y', 'remote', '')
         manage_pkg('-y install --nogpgcheck', 'remote', 'automake cmake3 gcc gcc-c++ kernel-devel leveldb-devel lmdb-devel libtool protobuf-devel graphviz')
-        conn.sudo('pip3.5 install flask graphviz hypothesis jupyter matplotlib=={} numpy=={} protobuf pydot python-nvd3 pyyaml '
+        datalab.fab.conn.sudo('pip3.5 install flask graphviz hypothesis jupyter matplotlib=={} numpy=={} protobuf pydot python-nvd3 pyyaml '
              'requests scikit-image scipy setuptools tornado future --no-cache-dir'.format(os.environ['notebook_matplotlib_version'], os.environ['notebook_numpy_version']))
-        conn.sudo('cp /opt/cudnn/include/* /opt/cuda-8.0/include/')
-        conn.sudo('cp /opt/cudnn/lib64/* /opt/cuda-8.0/lib64/')
-        conn.sudo('wget https://cmake.org/files/v{2}/cmake-{1}.tar.gz -O /home/{0}/cmake-{1}.tar.gz'.format(
+        datalab.fab.conn.sudo('cp /opt/cudnn/include/* /opt/cuda-8.0/include/')
+        datalab.fab.conn.sudo('cp /opt/cudnn/lib64/* /opt/cuda-8.0/lib64/')
+        datalab.fab.conn.sudo('wget https://cmake.org/files/v{2}/cmake-{1}.tar.gz -O /home/{0}/cmake-{1}.tar.gz'.format(
             os_user, cmake_version, cmake_version.split('.')[0] + "." + cmake_version.split('.')[1]))
-        conn.sudo('tar -zxvf cmake-{}.tar.gz'.format(cmake_version))
-        conn.sudo('''bash -c 'cd /home/{}/cmake-{}/ && ./bootstrap --prefix=/usr/local && make && make install' '''.format(os_user, cmake_version))
-        conn.sudo('ln -s /usr/local/bin/cmake /bin/cmake{}'.format(cmake_version))
-        conn.sudo('git clone https://github.com/pytorch/pytorch.git')
-        conn.sudo('''bash -c 'cd /home/{}/pytorch/ && git submodule update --init' '''.format(os_user))
-        conn.sudo('''bash -c 'cd /home/{}/pytorch/ && git checkout v{}' '''.format(os_user, caffe2_version), warn=True)
-        conn.sudo('''bash -c 'cd /home/{}/pytorch/ && git submodule update --recursive' '''.format(os_user), warn=True)
-        conn.sudo('''bash -c 'cd /home/{}/pytorch/ && mkdir build && cd build && cmake{} .. && make "-j$(nproc)" install' '''.format(os_user, cmake_version))
-        conn.sudo('touch /home/' + os_user + '/.ensure_dir/caffe2_ensured')
+        datalab.fab.conn.sudo('tar -zxvf cmake-{}.tar.gz'.format(cmake_version))
+        datalab.fab.conn.sudo('''bash -c 'cd /home/{}/cmake-{}/ && ./bootstrap --prefix=/usr/local && make && make install' '''.format(os_user, cmake_version))
+        datalab.fab.conn.sudo('ln -s /usr/local/bin/cmake /bin/cmake{}'.format(cmake_version))
+        datalab.fab.conn.sudo('git clone https://github.com/pytorch/pytorch.git')
+        datalab.fab.conn.sudo('''bash -c 'cd /home/{}/pytorch/ && git submodule update --init' '''.format(os_user))
+        datalab.fab.conn.sudo('''bash -c 'cd /home/{}/pytorch/ && git checkout v{}' '''.format(os_user, caffe2_version), warn=True)
+        datalab.fab.conn.sudo('''bash -c 'cd /home/{}/pytorch/ && git submodule update --recursive' '''.format(os_user), warn=True)
+        datalab.fab.conn.sudo('''bash -c 'cd /home/{}/pytorch/ && mkdir build && cd build && cmake{} .. && make "-j$(nproc)" install' '''.format(os_user, cmake_version))
+        datalab.fab.conn.sudo('touch /home/' + os_user + '/.ensure_dir/caffe2_ensured')
 
 
 def install_cntk(os_user, cntk_version):
-    if not exists(conn,'/home/{}/.ensure_dir/cntk_ensured'.format(os_user)):
-        conn.sudo('echo "exclude=*.i386 *.i686" >> /etc/yum.conf')
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/cntk_ensured'.format(os_user)):
+        datalab.fab.conn.sudo('echo "exclude=*.i386 *.i686" >> /etc/yum.conf')
         manage_pkg('clean', 'remote', 'all')
         manage_pkg('update-minimal --security -y', 'remote', '')
         manage_pkg('-y install --nogpgcheck', 'remote', 'openmpi openmpi-devel')
-        conn.sudo('pip3.5 install https://cntk.ai/PythonWheel/GPU/cntk-{}-cp35-cp35m-linux_x86_64.whl --no-cache-dir'.format(cntk_version))
-        conn.sudo('touch /home/{}/.ensure_dir/cntk_ensured'.format(os_user))
+        datalab.fab.conn.sudo('pip3.5 install https://cntk.ai/PythonWheel/GPU/cntk-{}-cp35-cp35m-linux_x86_64.whl --no-cache-dir'.format(cntk_version))
+        datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/cntk_ensured'.format(os_user))
 
 
 def install_keras(os_user, keras_version):
-    if not exists(conn,'/home/{}/.ensure_dir/keras_ensured'.format(os_user)):
-        conn.sudo('pip3.5 install keras=={} --no-cache-dir'.format(keras_version))
-        conn.sudo('touch /home/{}/.ensure_dir/keras_ensured'.format(os_user))
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/keras_ensured'.format(os_user)):
+        datalab.fab.conn.sudo('pip3.5 install keras=={} --no-cache-dir'.format(keras_version))
+        datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/keras_ensured'.format(os_user))
 
 
 def install_theano(os_user, theano_version):
-    if not exists(conn,'/home/{}/.ensure_dir/theano_ensured'.format(os_user)):
-        conn.sudo('python3.8 -m pip install Theano=={} --no-cache-dir'.format(theano_version))
-        conn.sudo('touch /home/{}/.ensure_dir/theano_ensured'.format(os_user))
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/theano_ensured'.format(os_user)):
+        datalab.fab.conn.sudo('python3.8 -m pip install Theano=={} --no-cache-dir'.format(theano_version))
+        datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/theano_ensured'.format(os_user))
 
 
 def install_mxnet(os_user, mxnet_version):
-    if not exists(conn,'/home/{}/.ensure_dir/mxnet_ensured'.format(os_user)):
-        conn.sudo('pip3.5 install mxnet-cu80=={} opencv-python --no-cache-dir'.format(mxnet_version))
-        conn.sudo('touch /home/{}/.ensure_dir/mxnet_ensured'.format(os_user))
+    if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/mxnet_ensured'.format(os_user)):
+        datalab.fab.conn.sudo('pip3.5 install mxnet-cu80=={} opencv-python --no-cache-dir'.format(mxnet_version))
+        datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/mxnet_ensured'.format(os_user))
 
 
 #def install_torch(os_user):
@@ -476,7 +476,7 @@ def install_mxnet(os_user, mxnet_version):
 
 def install_gitlab_cert(os_user, certfile):
     try:
-        conn.sudo('mv -f /home/{0}/{1} /etc/pki/ca-trust/source/anchors/{1}'.format(os_user, certfile))
-        conn.sudo('update-ca-trust')
+        datalab.fab.conn.sudo('mv -f /home/{0}/{1} /etc/pki/ca-trust/source/anchors/{1}'.format(os_user, certfile))
+        datalab.fab.conn.sudo('update-ca-trust')
     except Exception as err:
         print('Failed to install gitlab certificate.{}'.format(str(err)))
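
A note on the functions above: each install_* helper guards itself with a
marker file under /home/<os_user>/.ensure_dir/ so that re-running the
provisioning scripts is idempotent. A minimal sketch of that pattern,
assuming a fabric 2.x Connection; ensure_once and its arguments are
illustrative names, not part of the DataLab API:

    # Illustrative sketch of the marker-file pattern; ensure_once is not a DataLab function.
    from fabric import Connection

    def ensure_once(conn, os_user, marker, commands):
        marker_path = '/home/{}/.ensure_dir/{}_ensured'.format(os_user, marker)
        # warn=True keeps a failing 'test -f' from raising; .ok reports the result
        if conn.run('test -f {}'.format(marker_path), warn=True).ok:
            return  # already provisioned on an earlier run
        for command in commands:
            conn.sudo(command)
        # record success so the next provisioning run skips this block
        conn.sudo('touch {}'.format(marker_path))

    # usage (hypothetical host, user and package):
    # ensure_once(Connection('notebook-host', user='datalab-user'),
    #             'datalab-user', 'maven', ['yum -y install maven'])
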
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
index 9a1f418..2cbec9b 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
@@ -84,8 +84,7 @@ def configure_dataengine_service(instance, emr_conf):
                                                           emr_conf['key_path'], True)
             global conn
             conn = datalab.fab.init_datalab_connection(emr_conf['instance_ip'], emr_conf['os_user'], emr_conf['key_path'])
-            conn.sudo('echo "[main]" > /etc/yum/pluginconf.d/priorities.conf ; echo "enabled = 0" >> '
-                 '/etc/yum/pluginconf.d/priorities.conf')
+            conn.sudo('''bash -c 'echo "[main]" > /etc/yum/pluginconf.d/priorities.conf ; echo "enabled = 0" >> /etc/yum/pluginconf.d/priorities.conf' ''')
             manage_pkg('-y install', 'remote', 'R-devel')
             conn.close()
         except:
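
The hunk above also shows the safe way to run compound shell commands over
the fabric 2.x connection: ';' and output redirection are wrapped in an
explicit bash -c so a single root shell interprets them, instead of the
unprivileged outer shell splitting the string around sudo. An illustrative
sketch (not project code), reusing the conn object initialized above:

    cmd = ('echo "[main]" > /etc/yum/pluginconf.d/priorities.conf; '
           'echo "enabled = 0" >> /etc/yum/pluginconf.d/priorities.conf')
    # bash -c makes the ';' and both redirections run under sudo as well
    conn.sudo("bash -c '{}'".format(cmd))
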
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
index 5875b22..604a23a 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
@@ -24,6 +24,7 @@
 import argparse
 import json
 from datalab.notebook_lib import *
+from datalab.fab import *
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -45,6 +46,6 @@ if __name__ == "__main__":
     deeper_config = json.loads(args.additional_config)
 
     print("Enabling proxy for notebook server for repositories access.")
-    enable_proxy(deeper_config['proxy_host'], deeper_config['proxy_port'])
+    datalab.notebook_lib.enable_proxy(deeper_config['proxy_host'], deeper_config['proxy_port'])
 
     conn.close()
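
One detail worth noting in the last hunk: the script only wildcard-imports
datalab.notebook_lib and datalab.fab, yet calls the fully qualified
datalab.notebook_lib.enable_proxy(...). That plausibly works because a
wildcard import from a module without __all__ also copies the module-level
names that module itself imported, which can include the datalab package. A
toy sketch of the mechanism (pkg/fab.py and the names below are
illustrative, not DataLab's actual layout):

    # pkg/fab.py (toy module)
    import pkg.notebook_lib       # binds the name 'pkg' inside fab.py

    # script.py
    from pkg.fab import *         # no __all__, so 'pkg' is copied in too
    pkg.notebook_lib.enable_proxy('10.0.0.1', '3128')  # qualified call resolves
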
