Posted to commits@datalab.apache.org by lf...@apache.org on 2021/02/02 15:13:19 UTC

[incubator-datalab] branch DATALAB-2091 updated: [DATALAB-2091]: replaced sudo() run() put() with conn.sudo() conn.run() conn.put()

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2091
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git


The following commit(s) were added to refs/heads/DATALAB-2091 by this push:
     new 71bc158  [DATALAB-2091]: replaced sudo() run() put() with conn.sudo() conn.run() conn.put()
71bc158 is described below

commit 71bc15872cb912535ee37d845630b367b1d4ad5d
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Tue Feb 2 17:13:00 2021 +0200

    [DATALAB-2091]: replaced sudo() run() put() with conn.sudo() conn.run() conn.put()
---
 README.md                                          |   2 +-
 .../scripts/deploy_keycloak/deploy_keycloak.py     |  60 +--
 .../scripts/deploy_repository/deploy_repository.py | 366 ++++++-------
 .../src/base/scripts/create_ssh_user.py            |  20 +-
 .../src/base/scripts/install_prerequisites.py      |  12 +-
 .../src/base/scripts/install_user_key.py           |   6 +-
 .../src/dataengine/scripts/configure_dataengine.py |  66 +--
 .../scripts/configure_deep_learning_node.py        |  10 +-
 .../src/general/lib/aws/actions_lib.py             | 134 ++---
 .../src/general/lib/azure/actions_lib.py           | 174 +++---
 .../src/general/lib/gcp/actions_lib.py             | 126 ++---
 .../src/general/lib/os/debian/common_lib.py        | 120 ++---
 .../src/general/lib/os/debian/edge_lib.py          | 150 +++---
 .../src/general/lib/os/debian/notebook_lib.py      | 316 +++++------
 .../src/general/lib/os/debian/ssn_lib.py           | 198 +++----
 .../src/general/lib/os/fab.py                      | 598 ++++++++++-----------
 .../src/general/lib/os/redhat/common_lib.py        |  70 +--
 .../src/general/lib/os/redhat/edge_lib.py          | 184 +++----
 .../src/general/lib/os/redhat/notebook_lib.py      | 298 +++++-----
 .../src/general/lib/os/redhat/ssn_lib.py           | 212 ++++----
 .../scripts/aws/common_download_git_certfile.py    |   4 +-
 .../scripts/aws/dataengine-service_configure.py    |   2 +-
 .../jupyter_install_dataengine-service_kernels.py  |  34 +-
 .../rstudio_install_dataengine-service_kernels.py  |  20 +-
 .../zeppelin_install_dataengine-service_kernels.py |  24 +-
 .../scripts/azure/common_download_git_certfile.py  |   4 +-
 .../general/scripts/azure/common_start_notebook.py |   6 +-
 .../general/scripts/azure/rstudio_change_pass.py   |   2 +-
 .../scripts/gcp/common_download_git_certfile.py    |   4 +-
 .../jupyter_install_dataengine-service_kernels.py  |  38 +-
 .../rstudio_install_dataengine-service_kernels.py  |  24 +-
 .../zeppelin_install_dataengine-service_kernels.py |  24 +-
 .../src/general/scripts/os/check_inactivity.py     |   2 +-
 .../general/scripts/os/common_clean_instance.py    |  66 +--
 .../scripts/os/common_configure_reverse_proxy.py   |   4 +-
 .../general/scripts/os/common_configure_spark.py   |  16 +-
 .../scripts/os/configure_proxy_for_docker.py       |  24 +-
 .../os/deeplearning_install_dataengine_kernels.py  |  30 +-
 .../general/scripts/os/install_additional_libs.py  |   2 +-
 .../os/jupyter_install_dataengine_kernels.py       |  32 +-
 .../scripts/os/jupyterlab_container_start.py       |   8 +-
 .../src/general/scripts/os/manage_git_creds.py     |   8 +-
 .../src/general/scripts/os/reconfigure_spark.py    |  10 +-
 .../os/rstudio_install_dataengine_kernels.py       |  24 +-
 .../src/general/scripts/os/superset_start.py       |  14 +-
 .../tensor-rstudio_install_dataengine_kernels.py   |  24 +-
 .../os/tensor_install_dataengine_kernels.py        |  28 +-
 .../scripts/os/update_inactivity_on_start.py       |   2 +-
 .../os/zeppelin_install_dataengine_kernels.py      |  30 +-
 .../src/jupyter/scripts/configure_jupyter_node.py  |   4 +-
 .../scripts/configure_jupyterlab_node.py           |   2 +-
 .../src/rstudio/scripts/configure_rstudio_node.py  |   2 +-
 .../src/ssn/scripts/configure_docker.py            |  80 +--
 .../src/ssn/scripts/configure_gitlab.py            |  46 +-
 .../src/ssn/scripts/configure_ssn_node.py          | 150 +++---
 .../src/ssn/scripts/configure_ui.py                |  76 +--
 .../src/ssn/scripts/upload_response_file.py        |   8 +-
 .../superset/scripts/configure_superset_node.py    |   2 +-
 .../scripts/configure_tensor-rstudio_node.py       |   2 +-
 .../src/tensor/scripts/configure_tensor_node.py    |   2 +-
 .../zeppelin/scripts/configure_zeppelin_node.py    | 160 +++---
 .../epam/datalab/backendapi/dao/RequestIdDAO.java  |   2 +-
 .../epam/datalab/backendapi/domain/RequestId.java  |   4 +-
 63 files changed, 2086 insertions(+), 2086 deletions(-)

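For context on the change pattern in the diff below: these scripts previously used Fabric 1's implicit global helpers (sudo(), run(), put()), which act on hidden per-host state. Fabric 2 removes that global state, so every remote call goes through an explicit fabric.Connection object instead. A minimal sketch of the pattern this commit applies, assuming a Connection named conn (the host, user, and key path here are illustrative placeholders, not values taken from the commit):

    from fabric import Connection

    # Hypothetical connection setup; the DataLab scripts construct their own
    # conn object elsewhere, outside the hunks shown in this diff.
    conn = Connection(host='10.0.0.10', user='datalab-user',
                      connect_kwargs={'key_filename': '/path/to/key.pem'})

    conn.sudo('apt-get update')                           # was: sudo('apt-get update')
    conn.run('mkdir packages')                            # was: run('mkdir packages')
    conn.put('templates/nginx.conf', '/tmp/nginx.conf')   # was: put('templates/nginx.conf', ...)
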
diff --git a/README.md b/README.md
index 24d52b5..9981194 100644
--- a/README.md
+++ b/README.md
@@ -2585,7 +2585,7 @@ if __name__ == "__main__":
     print("Configuring notebook server.")
     try:
         if not exists('/home/' + args.os_user + '/.ensure_dir'):
-            sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
+            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/scripts/deploy_keycloak/deploy_keycloak.py b/infrastructure-provisioning/scripts/deploy_keycloak/deploy_keycloak.py
index 1f1922d..0d3849d 100644
--- a/infrastructure-provisioning/scripts/deploy_keycloak/deploy_keycloak.py
+++ b/infrastructure-provisioning/scripts/deploy_keycloak/deploy_keycloak.py
@@ -46,42 +46,42 @@ private_ip_address = "127.0.0.1"
 def ensure_jre_jdk(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/jre_jdk_ensured'):
         try:
-            sudo('mkdir -p /home/' + os_user + '/.ensure_dir')
-            sudo('apt-get update')
-            sudo('apt-get install -y default-jre')
-            sudo('apt-get install -y default-jdk')
-            sudo('touch /home/' + os_user + '/.ensure_dir/jre_jdk_ensured')
+            conn.sudo('mkdir -p /home/' + os_user + '/.ensure_dir')
+            conn.sudo('apt-get update')
+            conn.sudo('apt-get install -y default-jre')
+            conn.sudo('apt-get install -y default-jdk')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/jre_jdk_ensured')
         except:
             sys.exit(1)
 
 def configure_keycloak():
-    sudo('wget https://downloads.jboss.org/keycloak/' + keycloak_version + '/keycloak-' + keycloak_version + '.tar.gz -O /tmp/keycloak-' + keycloak_version + '.tar.gz')
-    sudo('tar -zxvf /tmp/keycloak-' + keycloak_version + '.tar.gz -C /opt/')
-    sudo('ln -s /opt/keycloak-' + keycloak_version + ' /opt/keycloak')
-    sudo('chown ' + args.os_user + ':' + args.os_user + ' -R /opt/keycloak-' + keycloak_version)
-    sudo('/opt/keycloak/bin/add-user-keycloak.sh -r master -u ' + args.keycloak_user + ' -p ' + args.keycloak_user_password) #create initial admin user in master realm
-    put(templates_dir + 'realm.json', '/tmp/' + args.keycloak_realm_name + '-realm.json')
-    put(templates_dir + 'keycloak.service', '/tmp/keycloak.service')
-    sudo("cp /tmp/keycloak.service /etc/systemd/system/keycloak.service")
-    sudo("sed -i 's|realm-name|" + args.keycloak_realm_name + "|' /tmp/" + args.keycloak_realm_name + "-realm.json")
-    sudo("sed -i 's|OS_USER|" + args.os_user + "|' /etc/systemd/system/keycloak.service")
-    sudo("sed -i 's|private_ip_address|" + private_ip_address + "|' /etc/systemd/system/keycloak.service")
-    sudo("sed -i 's|keycloak_realm_name|" + args.keycloak_realm_name + "|' /etc/systemd/system/keycloak.service")
-    sudo("systemctl daemon-reload")
-    sudo("systemctl enable keycloak")
-    sudo("systemctl start keycloak")
+    conn.sudo('wget https://downloads.jboss.org/keycloak/' + keycloak_version + '/keycloak-' + keycloak_version + '.tar.gz -O /tmp/keycloak-' + keycloak_version + '.tar.gz')
+    conn.sudo('tar -zxvf /tmp/keycloak-' + keycloak_version + '.tar.gz -C /opt/')
+    conn.sudo('ln -s /opt/keycloak-' + keycloak_version + ' /opt/keycloak')
+    conn.sudo('chown ' + args.os_user + ':' + args.os_user + ' -R /opt/keycloak-' + keycloak_version)
+    conn.sudo('/opt/keycloak/bin/add-user-keycloak.sh -r master -u ' + args.keycloak_user + ' -p ' + args.keycloak_user_password) #create initial admin user in master realm
+    conn.put(templates_dir + 'realm.json', '/tmp/' + args.keycloak_realm_name + '-realm.json')
+    conn.put(templates_dir + 'keycloak.service', '/tmp/keycloak.service')
+    conn.sudo("cp /tmp/keycloak.service /etc/systemd/system/keycloak.service")
+    conn.sudo("sed -i 's|realm-name|" + args.keycloak_realm_name + "|' /tmp/" + args.keycloak_realm_name + "-realm.json")
+    conn.sudo("sed -i 's|OS_USER|" + args.os_user + "|' /etc/systemd/system/keycloak.service")
+    conn.sudo("sed -i 's|private_ip_address|" + private_ip_address + "|' /etc/systemd/system/keycloak.service")
+    conn.sudo("sed -i 's|keycloak_realm_name|" + args.keycloak_realm_name + "|' /etc/systemd/system/keycloak.service")
+    conn.sudo("systemctl daemon-reload")
+    conn.sudo("systemctl enable keycloak")
+    conn.sudo("systemctl start keycloak")
 
 def configure_nginx():
-    sudo('apt install -y nginx')
-    put(templates_dir + 'nginx.conf', '/tmp/nginx.conf')
-    sudo("cp /tmp/nginx.conf /etc/nginx/conf.d/nginx.conf")
-    sudo("sed -i 's|80|81|' /etc/nginx/sites-enabled/default")
-    sudo("sed -i 's|external_port|" + external_port + "|' /etc/nginx/conf.d/nginx.conf")
-    sudo("sed -i 's|internal_port|" + internal_port + "|' /etc/nginx/conf.d/nginx.conf")
-    sudo("sed -i 's|private_ip_address|" + private_ip_address + "|' /etc/nginx/conf.d/nginx.conf")
-    sudo("systemctl daemon-reload")
-    sudo("systemctl enable nginx")
-    sudo("systemctl restart nginx")
+    conn.sudo('apt install -y nginx')
+    conn.put(templates_dir + 'nginx.conf', '/tmp/nginx.conf')
+    conn.sudo("cp /tmp/nginx.conf /etc/nginx/conf.d/nginx.conf")
+    conn.sudo("sed -i 's|80|81|' /etc/nginx/sites-enabled/default")
+    conn.sudo("sed -i 's|external_port|" + external_port + "|' /etc/nginx/conf.d/nginx.conf")
+    conn.sudo("sed -i 's|internal_port|" + internal_port + "|' /etc/nginx/conf.d/nginx.conf")
+    conn.sudo("sed -i 's|private_ip_address|" + private_ip_address + "|' /etc/nginx/conf.d/nginx.conf")
+    conn.sudo("systemctl daemon-reload")
+    conn.sudo("systemctl enable nginx")
+    conn.sudo("systemctl restart nginx")
 
 if __name__ == "__main__":
     local("sudo mkdir /logs/keycloak -p")
diff --git a/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py b/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py
index e267ced..12688a2 100644
--- a/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py
+++ b/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py
@@ -781,17 +781,17 @@ def remove_efs():
 def ensure_ssh_user(initial_user):
     try:
         if not exists('/home/{}/.ssh_user_ensured'.format(initial_user)):
-            sudo('useradd -m -G sudo -s /bin/bash {0}'.format(configuration['conf_os_user']))
-            sudo('echo "{} ALL = NOPASSWD:ALL" >> /etc/sudoers'.format(configuration['conf_os_user']))
-            sudo('mkdir /home/{}/.ssh'.format(configuration['conf_os_user']))
-            sudo('chown -R {0}:{0} /home/{1}/.ssh/'.format(initial_user, configuration['conf_os_user']))
-            sudo('cat /home/{0}/.ssh/authorized_keys > /home/{1}/.ssh/authorized_keys'.format(
+            conn.sudo('useradd -m -G sudo -s /bin/bash {0}'.format(configuration['conf_os_user']))
+            conn.sudo('echo "{} ALL = NOPASSWD:ALL" >> /etc/sudoers'.format(configuration['conf_os_user']))
+            conn.sudo('mkdir /home/{}/.ssh'.format(configuration['conf_os_user']))
+            conn.sudo('chown -R {0}:{0} /home/{1}/.ssh/'.format(initial_user, configuration['conf_os_user']))
+            conn.sudo('cat /home/{0}/.ssh/authorized_keys > /home/{1}/.ssh/authorized_keys'.format(
                 initial_user, configuration['conf_os_user']))
-            sudo('chown -R {0}:{0} /home/{0}/.ssh/'.format(configuration['conf_os_user']))
-            sudo('chmod 700 /home/{0}/.ssh'.format(configuration['conf_os_user']))
-            sudo('chmod 600 /home/{0}/.ssh/authorized_keys'.format(configuration['conf_os_user']))
-            sudo('mkdir /home/{}/.ensure_dir'.format(configuration['conf_os_user']))
-            sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
+            conn.sudo('chown -R {0}:{0} /home/{0}/.ssh/'.format(configuration['conf_os_user']))
+            conn.sudo('chmod 700 /home/{0}/.ssh'.format(configuration['conf_os_user']))
+            conn.sudo('chmod 600 /home/{0}/.ssh/authorized_keys'.format(configuration['conf_os_user']))
+            conn.sudo('mkdir /home/{}/.ensure_dir'.format(configuration['conf_os_user']))
+            conn.sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
     except Exception as err:
         traceback.print_exc(file=sys.stdout)
         print('Error with creating datalab-user: {}'.format(str(err)))
@@ -801,9 +801,9 @@ def ensure_ssh_user(initial_user):
 def install_java():
     try:
         if not exists('/home/{}/.ensure_dir/java_ensured'.format(configuration['conf_os_user'])):
-            sudo('apt-get update')
-            sudo('apt-get install -y default-jdk ')
-            sudo('touch /home/{}/.ensure_dir/java_ensured'.format(configuration['conf_os_user']))
+            conn.sudo('apt-get update')
+            conn.sudo('apt-get install -y default-jdk ')
+            conn.sudo('touch /home/{}/.ensure_dir/java_ensured'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc(file=sys.stdout)
         print('Error with installing Java: {}'.format(str(err)))
@@ -813,15 +813,15 @@ def install_java():
 def install_groovy():
     try:
         if not exists('/home/{}/.ensure_dir/groovy_ensured'.format(configuration['conf_os_user'])):
-            sudo('apt-get install -y unzip')
-            sudo('mkdir /usr/local/groovy')
-            sudo('wget https://bintray.com/artifact/download/groovy/maven/apache-groovy-binary-{0}.zip -O \
+            conn.sudo('apt-get install -y unzip')
+            conn.sudo('mkdir /usr/local/groovy')
+            conn.sudo('wget https://bintray.com/artifact/download/groovy/maven/apache-groovy-binary-{0}.zip -O \
                   /tmp/apache-groovy-binary-{0}.zip'.format(groovy_version))
-            sudo('unzip /tmp/apache-groovy-binary-{}.zip -d \
+            conn.sudo('unzip /tmp/apache-groovy-binary-{}.zip -d \
                   /usr/local/groovy'.format(groovy_version))
-            sudo('ln -s /usr/local/groovy/groovy-{} \
+            conn.sudo('ln -s /usr/local/groovy/groovy-{} \
                   /usr/local/groovy/latest'.format(groovy_version))
-            sudo('touch /home/{}/.ensure_dir/groovy_ensured'.format(configuration['conf_os_user']))
+            conn.sudo('touch /home/{}/.ensure_dir/groovy_ensured'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc(file=sys.stdout)
         print('Error with installing Groovy: {}'.format(str(err)))
@@ -835,7 +835,7 @@ def nexus_service_waiter():
         while not nexus_started and checks_count < 200:
             print('Waiting nexus to be started...')
             time.sleep(5)
-            result = sudo('nmap -p 8443 localhost | grep closed > /dev/null ; echo $?')
+            result = conn.sudo('nmap -p 8443 localhost | grep closed > /dev/null ; echo $?')
             result = result[:1]
             if result == '1':
                 nexus_started = True
@@ -853,178 +853,178 @@ def install_nexus():
                 mounting_disks()
             else:
                 mount_efs()
-            sudo('apt-get install -y maven nmap python-pip')
-            sudo('pip2 install -UI pip')
-            sudo('pip2 install -U fabric==1.14.0')
-            sudo('mkdir -p /opt/nexus')
-            sudo('wget https://sonatype-download.global.ssl.fastly.net/nexus/{0}/nexus-{1}-unix.tar.gz -O \
+            conn.sudo('apt-get install -y maven nmap python-pip')
+            conn.sudo('pip2 install -UI pip')
+            conn.sudo('pip2 install -U fabric==1.14.0')
+            conn.sudo('mkdir -p /opt/nexus')
+            conn.sudo('wget https://sonatype-download.global.ssl.fastly.net/nexus/{0}/nexus-{1}-unix.tar.gz -O \
                   /opt/nexus-{1}-unix.tar.gz'.format(
                   nexus_version.split('.')[0], nexus_version))
-            sudo('tar -zhxvf /opt/nexus-{}-unix.tar.gz -C /opt/'.format(
+            conn.sudo('tar -zhxvf /opt/nexus-{}-unix.tar.gz -C /opt/'.format(
                   nexus_version))
-            sudo('mv /opt/nexus-{}/* /opt/nexus/'.format(nexus_version))
-            sudo('mv /opt/nexus-{}/.[!.]* /opt/nexus/'.format(
+            conn.sudo('mv /opt/nexus-{}/* /opt/nexus/'.format(nexus_version))
+            conn.sudo('mv /opt/nexus-{}/.[!.]* /opt/nexus/'.format(
                   nexus_version))
-            sudo('rm -rf /opt/nexus-{}'.format(nexus_version))
-            sudo('useradd nexus')
-            sudo('echo \"run_as_user="nexus"\" > /opt/nexus/bin/nexus.rc')
+            conn.sudo('rm -rf /opt/nexus-{}'.format(nexus_version))
+            conn.sudo('useradd nexus')
+            conn.sudo('echo \"run_as_user="nexus"\" > /opt/nexus/bin/nexus.rc')
             create_keystore()
-            put('templates/jetty-https.xml', '/tmp/jetty-https.xml')
-            sudo('sed -i "s/KEYSTORE_PASSWORD/{}/g" /tmp/jetty-https.xml'.format(keystore_pass))
-            sudo('cp -f /tmp/jetty-https.xml /opt/nexus/etc/jetty/')
-            put('templates/nexus.service', '/tmp/nexus.service')
+            conn.put('templates/jetty-https.xml', '/tmp/jetty-https.xml')
+            conn.sudo('sed -i "s/KEYSTORE_PASSWORD/{}/g" /tmp/jetty-https.xml'.format(keystore_pass))
+            conn.sudo('cp -f /tmp/jetty-https.xml /opt/nexus/etc/jetty/')
+            conn.put('templates/nexus.service', '/tmp/nexus.service')
             if args.efs_enabled == 'False':
-                sudo('sed -i "s|EFS_SERVICE||g" /tmp/nexus.service')
+                conn.sudo('sed -i "s|EFS_SERVICE||g" /tmp/nexus.service')
             else:
-                sudo('sed -i "s|EFS_SERVICE|mount-efs-sequentially.service|g" /tmp/nexus.service')
-            sudo('cp /tmp/nexus.service /etc/systemd/system/')
-            put('files/nexus.properties', '/tmp/nexus.properties')
-            sudo('mkdir -p /opt/sonatype-work/nexus3/etc')
-            sudo('cp -f /tmp/nexus.properties /opt/sonatype-work/nexus3/etc/nexus.properties')
-            sudo('chown -R nexus:nexus /opt/nexus /opt/sonatype-work')
-            sudo('systemctl daemon-reload')
-            sudo('systemctl start nexus')
+                conn.sudo('sed -i "s|EFS_SERVICE|mount-efs-sequentially.service|g" /tmp/nexus.service')
+            conn.sudo('cp /tmp/nexus.service /etc/systemd/system/')
+            conn.put('files/nexus.properties', '/tmp/nexus.properties')
+            conn.sudo('mkdir -p /opt/sonatype-work/nexus3/etc')
+            conn.sudo('cp -f /tmp/nexus.properties /opt/sonatype-work/nexus3/etc/nexus.properties')
+            conn.sudo('chown -R nexus:nexus /opt/nexus /opt/sonatype-work')
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo('systemctl start nexus')
             nexus_service_waiter()
-            sudo('systemctl enable nexus')
-            put('templates/configureNexus.groovy', '/tmp/configureNexus.groovy')
-            sudo('sed -i "s/REGION/{}/g" /tmp/configureNexus.groovy'.format(args.region))
-            sudo('sed -i "s/ADMIN_PASSWORD/{}/g" /tmp/configureNexus.groovy'.format(args.nexus_admin_password))
-            sudo('sed -i "s/SERVICE_USER_NAME/{}/g" /tmp/configureNexus.groovy'.format(args.nexus_service_user_name))
-            sudo('sed -i "s/SERVICE_USER_PASSWORD/{}/g" /tmp/configureNexus.groovy'.format(
+            conn.sudo('systemctl enable nexus')
+            conn.put('templates/configureNexus.groovy', '/tmp/configureNexus.groovy')
+            conn.sudo('sed -i "s/REGION/{}/g" /tmp/configureNexus.groovy'.format(args.region))
+            conn.sudo('sed -i "s/ADMIN_PASSWORD/{}/g" /tmp/configureNexus.groovy'.format(args.nexus_admin_password))
+            conn.sudo('sed -i "s/SERVICE_USER_NAME/{}/g" /tmp/configureNexus.groovy'.format(args.nexus_service_user_name))
+            conn.sudo('sed -i "s/SERVICE_USER_PASSWORD/{}/g" /tmp/configureNexus.groovy'.format(
                 args.nexus_service_user_password))
-            sudo('wget http://repo.{}.amazonaws.com/2017.09/main/mirror.list -O /tmp/main_mirror.list'.format(
+            conn.sudo('wget http://repo.{}.amazonaws.com/2017.09/main/mirror.list -O /tmp/main_mirror.list'.format(
                 args.region))
-            sudo('wget http://repo.{}.amazonaws.com/2017.09/updates/mirror.list -O /tmp/updates_mirror.list'.format(
+            conn.sudo('wget http://repo.{}.amazonaws.com/2017.09/updates/mirror.list -O /tmp/updates_mirror.list'.format(
                 args.region))
-            amazon_main_repo = sudo("cat /tmp/main_mirror.list  | grep {} | sed 's/$basearch//g'".format(args.region))
-            amazon_updates_repo = sudo("cat /tmp/updates_mirror.list  | grep {} | sed 's/$basearch//g'".format(
+            amazon_main_repo = conn.sudo("cat /tmp/main_mirror.list  | grep {} | sed 's/$basearch//g'".format(args.region))
+            amazon_updates_repo = conn.sudo("cat /tmp/updates_mirror.list  | grep {} | sed 's/$basearch//g'".format(
                 args.region))
-            sudo('sed -i "s|AMAZON_MAIN_URL|{}|g" /tmp/configureNexus.groovy'.format(amazon_main_repo))
-            sudo('sed -i "s|AMAZON_UPDATES_URL|{}|g" /tmp/configureNexus.groovy'.format(amazon_updates_repo))
-            sudo('rm -f /tmp/main_mirror.list')
-            sudo('rm -f /tmp/updates_mirror.list')
-            put('scripts/addUpdateScript.groovy', '/tmp/addUpdateScript.groovy')
+            conn.sudo('sed -i "s|AMAZON_MAIN_URL|{}|g" /tmp/configureNexus.groovy'.format(amazon_main_repo))
+            conn.sudo('sed -i "s|AMAZON_UPDATES_URL|{}|g" /tmp/configureNexus.groovy'.format(amazon_updates_repo))
+            conn.sudo('rm -f /tmp/main_mirror.list')
+            conn.sudo('rm -f /tmp/updates_mirror.list')
+            conn.put('scripts/addUpdateScript.groovy', '/tmp/addUpdateScript.groovy')
             script_executed = False
             while not script_executed:
                 try:
-                    sudo('/usr/local/groovy/latest/bin/groovy /tmp/addUpdateScript.groovy -u "admin" -p "admin123" \
+                    conn.sudo('/usr/local/groovy/latest/bin/groovy /tmp/addUpdateScript.groovy -u "admin" -p "admin123" \
                           -n "configureNexus" -f "/tmp/configureNexus.groovy" -h "http://localhost:8081"')
                     script_executed = True
                 except:
                     time.sleep(10)
                     pass
-            sudo('curl -u admin:admin123 -X POST --header \'Content-Type: text/plain\' \
+            conn.sudo('curl -u admin:admin123 -X POST --header \'Content-Type: text/plain\' \
                    http://localhost:8081/service/rest/v1/script/configureNexus/run')
-            sudo('systemctl stop nexus')
-            sudo('git clone https://github.com/sonatype-nexus-community/nexus-repository-apt')
+            conn.sudo('systemctl stop nexus')
+            conn.sudo('git clone https://github.com/sonatype-nexus-community/nexus-repository-apt')
             with cd('nexus-repository-apt'):
-                sudo('mvn')
-            apt_plugin_version = sudo('find nexus-repository-apt/ -name "nexus-repository-apt-*.jar" '
+                conn.sudo('mvn')
+            apt_plugin_version = conn.sudo('find nexus-repository-apt/ -name "nexus-repository-apt-*.jar" '
                                       '-printf "%f\\n" | grep -v "sources"').replace('nexus-repository-apt-',
                                                                                      '').replace('.jar', '')
-            compress_plugin_version = sudo('find /opt/nexus/ -name "commons-compress-*.jar" '
+            compress_plugin_version = conn.sudo('find /opt/nexus/ -name "commons-compress-*.jar" '
                                            '-printf "%f\\n" ').replace('commons-compress-', '').replace('.jar', '')
-            xz_plugin_version = sudo('find /opt/nexus/ -name "xz-*.jar" '
+            xz_plugin_version = conn.sudo('find /opt/nexus/ -name "xz-*.jar" '
                                      '-printf "%f\\n" ').replace('xz-', '').replace('.jar', '')
-            sudo('mkdir -p /opt/nexus/system/net/staticsnow/nexus-repository-apt/{0}/'.format(apt_plugin_version))
-            apt_plugin_jar_path = sudo('find nexus-repository-apt/ -name "nexus-repository-apt-{0}.jar"'.format(
+            conn.sudo('mkdir -p /opt/nexus/system/net/staticsnow/nexus-repository-apt/{0}/'.format(apt_plugin_version))
+            apt_plugin_jar_path = conn.sudo('find nexus-repository-apt/ -name "nexus-repository-apt-{0}.jar"'.format(
                 apt_plugin_version))
-            sudo('cp -f {0} /opt/nexus/system/net/staticsnow/nexus-repository-apt/{1}/'.format(
+            conn.sudo('cp -f {0} /opt/nexus/system/net/staticsnow/nexus-repository-apt/{1}/'.format(
                 apt_plugin_jar_path, apt_plugin_version
             ))
-            sudo('sed -i "$ d" /opt/nexus/system/org/sonatype/nexus/assemblies/nexus-core-feature/{0}/'
+            conn.sudo('sed -i "$ d" /opt/nexus/system/org/sonatype/nexus/assemblies/nexus-core-feature/{0}/'
                  'nexus-core-feature-{0}-features.xml'.format(nexus_version))
-            sudo('''echo '<feature name="nexus-repository-apt" description="net.staticsnow:nexus-repository-apt" '''
+            conn.sudo('''echo '<feature name="nexus-repository-apt" description="net.staticsnow:nexus-repository-apt" '''
                  '''version="{1}">' >> /opt/nexus/system/org/sonatype/nexus/assemblies/nexus-core-feature/{0}/'''
                  '''nexus-core-feature-{0}-features.xml'''.format(nexus_version, apt_plugin_version))
-            sudo('''echo '<details>net.staticsnow:nexus-repository-apt</details>' >> '''
+            conn.sudo('''echo '<details>net.staticsnow:nexus-repository-apt</details>' >> '''
                  '''/opt/nexus/system/org/sonatype/nexus/assemblies/nexus-core-feature/{0}/'''
                  '''nexus-core-feature-{0}-features.xml'''.format(nexus_version))
-            sudo('''echo '<bundle>mvn:net.staticsnow/nexus-repository-apt/{1}</bundle>' >> '''
+            conn.sudo('''echo '<bundle>mvn:net.staticsnow/nexus-repository-apt/{1}</bundle>' >> '''
                  '''/opt/nexus/system/org/sonatype/nexus/assemblies/nexus-core-feature/{0}/'''
                  '''nexus-core-feature-{0}-features.xml'''.format(nexus_version, apt_plugin_version))
-            sudo('''echo '<bundle>mvn:org.apache.commons/commons-compress/{1}</bundle>' >> '''
+            conn.sudo('''echo '<bundle>mvn:org.apache.commons/commons-compress/{1}</bundle>' >> '''
                  '''/opt/nexus/system/org/sonatype/nexus/assemblies/nexus-core-feature/{0}/'''
                  '''nexus-core-feature-{0}-features.xml'''.format(nexus_version, compress_plugin_version))
-            sudo('''echo '<bundle>mvn:org.tukaani/xz/{1}</bundle>' >> '''
+            conn.sudo('''echo '<bundle>mvn:org.tukaani/xz/{1}</bundle>' >> '''
                  '''/opt/nexus/system/org/sonatype/nexus/assemblies/nexus-core-feature/{0}/'''
                  '''nexus-core-feature-{0}-features.xml'''.format(nexus_version, xz_plugin_version))
-            sudo('''echo '</feature>' >> '''
+            conn.sudo('''echo '</feature>' >> '''
                  '''/opt/nexus/system/org/sonatype/nexus/assemblies/nexus-core-feature/{0}/'''
                  '''nexus-core-feature-{0}-features.xml'''.format(nexus_version))
-            sudo('''echo '</features>' >> '''
+            conn.sudo('''echo '</features>' >> '''
                  '''/opt/nexus/system/org/sonatype/nexus/assemblies/nexus-core-feature/{0}/'''
                  '''nexus-core-feature-{0}-features.xml'''.format(nexus_version))
-            sudo('''sed -i 's|<feature prerequisite=\"true\" dependency=\"false\">wrap</feature>|'''
+            conn.sudo('''sed -i 's|<feature prerequisite=\"true\" dependency=\"false\">wrap</feature>|'''
                  '''<feature prerequisite=\"true\" dependency=\"false\">wrap</feature>\\n'''
                  '''<feature prerequisite=\"false\" dependency=\"false\">nexus-repository-apt</feature>|g' '''
                  '''/opt/nexus/system/org/sonatype/nexus/assemblies/nexus-core-feature/{0}/nexus-core-feature-'''
                  '''{0}-features.xml'''.format(nexus_version))
-            sudo('git clone https://github.com/sonatype-nexus-community/nexus-repository-r.git')
+            conn.sudo('git clone https://github.com/sonatype-nexus-community/nexus-repository-r.git')
             with cd('nexus-repository-r'):
-                sudo('mvn clean install')
-            r_plugin_version = sudo('find nexus-repository-r/ -name "nexus-repository-r-*.jar" '
+                conn.sudo('mvn clean install')
+            r_plugin_version = conn.sudo('find nexus-repository-r/ -name "nexus-repository-r-*.jar" '
                                     '-printf "%f\\n" | grep -v "sources"').replace('nexus-repository-r-', '').replace(
                 '.jar', '')
-            sudo('mkdir -p /opt/nexus/system/org/sonatype/nexus/plugins/nexus-repository-r/{}/'.format(
+            conn.sudo('mkdir -p /opt/nexus/system/org/sonatype/nexus/plugins/nexus-repository-r/{}/'.format(
                 r_plugin_version))
-            r_plugin_jar_path = sudo('find nexus-repository-r/ -name "nexus-repository-r-{0}.jar"'.format(
+            r_plugin_jar_path = conn.sudo('find nexus-repository-r/ -name "nexus-repository-r-{0}.jar"'.format(
                 r_plugin_version))
-            sudo('cp -f {0} /opt/nexus/system/org/sonatype/nexus/plugins/nexus-repository-r/{1}/'.format(
+            conn.sudo('cp -f {0} /opt/nexus/system/org/sonatype/nexus/plugins/nexus-repository-r/{1}/'.format(
                 r_plugin_jar_path, r_plugin_version
             ))
-            sudo('sed -i "$ d" /opt/nexus/system/com/sonatype/nexus/assemblies/nexus-oss-feature/{0}/'
+            conn.sudo('sed -i "$ d" /opt/nexus/system/com/sonatype/nexus/assemblies/nexus-oss-feature/{0}/'
                  'nexus-oss-feature-{0}-features.xml'.format(nexus_version))
-            sudo('''echo '<feature name="nexus-repository-r" description="org.sonatype.nexus.plugins:'''
+            conn.sudo('''echo '<feature name="nexus-repository-r" description="org.sonatype.nexus.plugins:'''
                  '''nexus-repository-r" version="{1}">' >> /opt/nexus/system/com/sonatype/nexus/assemblies/'''
                  '''nexus-oss-feature/{0}/nexus-oss-feature-{0}-features.xml'''.format(nexus_version, r_plugin_version))
-            sudo('''echo '<details>org.sonatype.nexus.plugins:nexus-repository-r</details>' >> '''
+            conn.sudo('''echo '<details>org.sonatype.nexus.plugins:nexus-repository-r</details>' >> '''
                  '''/opt/nexus/system/com/sonatype/nexus/assemblies/nexus-oss-feature/{0}/'''
                  '''nexus-oss-feature-{0}-features.xml'''.format(nexus_version))
-            sudo('''echo '<bundle>mvn:org.sonatype.nexus.plugins/nexus-repository-r/{1}</bundle>' >> '''
+            conn.sudo('''echo '<bundle>mvn:org.sonatype.nexus.plugins/nexus-repository-r/{1}</bundle>' >> '''
                  '''/opt/nexus/system/com/sonatype/nexus/assemblies/nexus-oss-feature/{0}/'''
                  '''nexus-oss-feature-{0}-features.xml'''.format(nexus_version, r_plugin_version))
-            sudo('''echo '</feature>' >> '''
+            conn.sudo('''echo '</feature>' >> '''
                  '''/opt/nexus/system/com/sonatype/nexus/assemblies/nexus-oss-feature/{0}/'''
                  '''nexus-oss-feature-{0}-features.xml'''.format(nexus_version))
-            sudo('''echo '</features>' >> '''
+            conn.sudo('''echo '</features>' >> '''
                  '''/opt/nexus/system/com/sonatype/nexus/assemblies/nexus-oss-feature/{0}/'''
                  '''nexus-oss-feature-{0}-features.xml'''.format(nexus_version))
-            sudo('''sed -i 's|<feature prerequisite=\"true\" dependency=\"false\">wrap</feature>|'''
+            conn.sudo('''sed -i 's|<feature prerequisite=\"true\" dependency=\"false\">wrap</feature>|'''
                  '''<feature prerequisite=\"true\" dependency=\"false\">wrap</feature>\\n'''
                  '''<feature version=\"{1}\" prerequisite=\"false\" dependency=\"false\">'''
                  '''nexus-repository-r</feature>|g' '''
                  '''/opt/nexus/system/com/sonatype/nexus/assemblies/nexus-oss-feature/{0}/'''
                  '''nexus-oss-feature-{0}-features.xml'''.format(nexus_version, r_plugin_version))
-            sudo('chown -R nexus:nexus /opt/nexus')
-            sudo('systemctl start nexus')
+            conn.sudo('chown -R nexus:nexus /opt/nexus')
+            conn.sudo('systemctl start nexus')
             nexus_service_waiter()
-            put('templates/addCustomRepository.groovy', '/tmp/addCustomRepository.groovy')
-            sudo('sed -i "s|REGION|{0}|g" /tmp/addCustomRepository.groovy'.format(args.region))
+            conn.put('templates/addCustomRepository.groovy', '/tmp/addCustomRepository.groovy')
+            conn.sudo('sed -i "s|REGION|{0}|g" /tmp/addCustomRepository.groovy'.format(args.region))
             script_executed = False
             while not script_executed:
                 try:
-                    sudo('/usr/local/groovy/latest/bin/groovy /tmp/addUpdateScript.groovy -u "admin" -p "{}" '
+                    conn.sudo('/usr/local/groovy/latest/bin/groovy /tmp/addUpdateScript.groovy -u "admin" -p "{}" '
                          '-n "addCustomRepository" -f "/tmp/addCustomRepository.groovy" -h '
                          '"http://localhost:8081"'.format(args.nexus_admin_password))
                     script_executed = True
                 except:
                     time.sleep(10)
                     pass
-            sudo('curl -u admin:{} -X POST --header \'Content-Type: text/plain\' '
+            conn.sudo('curl -u admin:{} -X POST --header \'Content-Type: text/plain\' '
                  'http://localhost:8081/service/rest/v1/script/addCustomRepository/run'.format(
                   args.nexus_admin_password))
-            sudo('echo "admin:{}" > /opt/nexus/credentials'.format(args.nexus_admin_password))
-            sudo('echo "{0}:{1}" >> /opt/nexus/credentials'.format(args.nexus_service_user_name,
+            conn.sudo('echo "admin:{}" > /opt/nexus/credentials'.format(args.nexus_admin_password))
+            conn.sudo('echo "{0}:{1}" >> /opt/nexus/credentials'.format(args.nexus_service_user_name,
                                                                    args.nexus_service_user_password))
-            put('templates/updateRepositories.groovy', '/opt/nexus/updateRepositories.groovy', use_sudo=True)
-            put('scripts/update_amazon_repositories.py', '/opt/nexus/update_amazon_repositories.py', use_sudo=True)
-            sudo('sed -i "s|NEXUS_PASSWORD|{}|g" /opt/nexus/update_amazon_repositories.py'.format(
+            conn.put('templates/updateRepositories.groovy', '/opt/nexus/updateRepositories.groovy', use_sudo=True)
+            conn.put('scripts/update_amazon_repositories.py', '/opt/nexus/update_amazon_repositories.py', use_sudo=True)
+            conn.sudo('sed -i "s|NEXUS_PASSWORD|{}|g" /opt/nexus/update_amazon_repositories.py'.format(
                  args.nexus_admin_password))
-            sudo('touch /var/log/amazon_repo_update.log')
-            sudo('echo "0 0 * * * root /usr/bin/python /opt/nexus/update_amazon_repositories.py --region {} >> '
+            conn.sudo('touch /var/log/amazon_repo_update.log')
+            conn.sudo('echo "0 0 * * * root /usr/bin/python /opt/nexus/update_amazon_repositories.py --region {} >> '
                  '/var/log/amazon_repo_update.log" >> /etc/crontab'.format(args.region))
-            sudo('touch /home/{}/.ensure_dir/nexus_ensured'.format(configuration['conf_os_user']))
+            conn.sudo('touch /home/{}/.ensure_dir/nexus_ensured'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc(file=sys.stdout)
         print('Error with installing Nexus: {}'.format(str(err)))
@@ -1034,20 +1034,20 @@ def install_nexus():
 def install_nginx():
     try:
         if not exists('/home/{}/.ensure_dir/nginx_ensured'.format(configuration['conf_os_user'])):
-            hostname = sudo('hostname')
-            sudo('apt-get install -y nginx')
-            sudo('rm -f /etc/nginx/conf.d/* /etc/nginx/sites-enabled/default')
-            put('templates/nexus.conf', '/tmp/nexus.conf')
+            hostname = conn.sudo('hostname')
+            conn.sudo('apt-get install -y nginx')
+            conn.sudo('rm -f /etc/nginx/conf.d/* /etc/nginx/sites-enabled/default')
+            conn.put('templates/nexus.conf', '/tmp/nexus.conf')
             if args.hosted_zone_id and args.hosted_zone_name and args.subdomain:
-                sudo('sed -i "s|SUBDOMAIN|{}|g" /tmp/nexus.conf'.format(args.subdomain))
-                sudo('sed -i "s|HOSTZONE|{}|g" /tmp/nexus.conf'.format(args.hosted_zone_name))
+                conn.sudo('sed -i "s|SUBDOMAIN|{}|g" /tmp/nexus.conf'.format(args.subdomain))
+                conn.sudo('sed -i "s|HOSTZONE|{}|g" /tmp/nexus.conf'.format(args.hosted_zone_name))
             else:
-                sudo('sed -i "s|SUBDOMAIN.HOSTZONE|{}|g" /tmp/nexus.conf'.format(hostname))
-            sudo('sed -i "s|REGION|{}|g" /tmp/nexus.conf'.format(args.region))
-            sudo('cp /tmp/nexus.conf /etc/nginx/conf.d/nexus.conf'.format(args.subdomain, args.hosted_zone_name))
-            sudo('systemctl restart nginx')
-            sudo('systemctl enable nginx')
-            sudo('touch /home/{}/.ensure_dir/nginx_ensured'.format(configuration['conf_os_user']))
+                conn.sudo('sed -i "s|SUBDOMAIN.HOSTZONE|{}|g" /tmp/nexus.conf'.format(hostname))
+            conn.sudo('sed -i "s|REGION|{}|g" /tmp/nexus.conf'.format(args.region))
+            conn.sudo('cp /tmp/nexus.conf /etc/nginx/conf.d/nexus.conf'.format(args.subdomain, args.hosted_zone_name))
+            conn.sudo('systemctl restart nginx')
+            conn.sudo('systemctl enable nginx')
+            conn.sudo('touch /home/{}/.ensure_dir/nginx_ensured'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc(file=sys.stdout)
         print('Error with installing Nginx: {}'.format(str(err)))
@@ -1057,17 +1057,17 @@ def install_nginx():
 def mounting_disks():
     try:
         if not exists('/home/{}/.ensure_dir/additional_disk_mounted'.format(configuration['conf_os_user'])):
-            sudo('mkdir -p /opt/sonatype-work')
-            disk_name = sudo("lsblk | grep disk | awk '{print $1}' | sort | tail -n 1 | tr '\\n' ',' | sed 's|.$||g'")
-            sudo('bash -c \'echo -e "o\nn\np\n1\n\n\nw" | fdisk /dev/{}\' '.format(disk_name))
-            sudo('sleep 10')
-            partition_name = sudo("lsblk -r | grep part | grep {} | awk {} | sort | tail -n 1 | "
+            conn.sudo('mkdir -p /opt/sonatype-work')
+            disk_name = conn.sudo("lsblk | grep disk | awk '{print $1}' | sort | tail -n 1 | tr '\\n' ',' | sed 's|.$||g'")
+            conn.sudo('bash -c \'echo -e "o\nn\np\n1\n\n\nw" | fdisk /dev/{}\' '.format(disk_name))
+            conn.sudo('sleep 10')
+            partition_name = conn.sudo("lsblk -r | grep part | grep {} | awk {} | sort | tail -n 1 | "
                                   "tr '\\n' ',' | sed 's|.$||g'".format(disk_name, "'{print $1}'"))
-            sudo('mkfs.ext4 -F -q /dev/{}'.format(partition_name))
-            sudo('mount /dev/{0} /opt/sonatype-work'.format(partition_name))
-            sudo('bash -c "echo \'/dev/{} /opt/sonatype-work ext4 errors=remount-ro 0 1\' >> /etc/fstab"'.format(
+            conn.sudo('mkfs.ext4 -F -q /dev/{}'.format(partition_name))
+            conn.sudo('mount /dev/{0} /opt/sonatype-work'.format(partition_name))
+            conn.sudo('bash -c "echo \'/dev/{} /opt/sonatype-work ext4 errors=remount-ro 0 1\' >> /etc/fstab"'.format(
                 partition_name))
-            sudo('touch /home/{}/.ensure_dir/additional_disk_mounted'.format(configuration['conf_os_user']))
+            conn.sudo('touch /home/{}/.ensure_dir/additional_disk_mounted'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc(file=sys.stdout)
         print('Failed to mount additional volume: {}'.format(str(err)))
@@ -1077,26 +1077,26 @@ def mounting_disks():
 def mount_efs():
     try:
         if not exists('/home/{}/.ensure_dir/efs_mounted'.format(configuration['conf_os_user'])):
-            sudo('mkdir -p /opt/sonatype-work')
-            sudo('apt-get -y install binutils')
+            conn.sudo('mkdir -p /opt/sonatype-work')
+            conn.sudo('apt-get -y install binutils')
             with cd('/tmp/'):
-                sudo('git clone https://github.com/aws/efs-utils')
+                conn.sudo('git clone https://github.com/aws/efs-utils')
             with cd('/tmp/efs-utils'):
-                sudo('./build-deb.sh')
-                sudo('apt-get -y install ./build/amazon-efs-utils*deb')
-            sudo('sed -i "s/stunnel_check_cert_hostname.*/stunnel_check_cert_hostname = false/g" '
+                conn.sudo('./build-deb.sh')
+                conn.sudo('apt-get -y install ./build/amazon-efs-utils*deb')
+            conn.sudo('sed -i "s/stunnel_check_cert_hostname.*/stunnel_check_cert_hostname = false/g" '
                  '/etc/amazon/efs/efs-utils.conf')
-            sudo('sed -i "s/stunnel_check_cert_validity.*/stunnel_check_cert_validity = false/g" '
+            conn.sudo('sed -i "s/stunnel_check_cert_validity.*/stunnel_check_cert_validity = false/g" '
                  '/etc/amazon/efs/efs-utils.conf')
-            sudo('mount -t efs -o tls {}:/ /opt/sonatype-work'.format(
+            conn.sudo('mount -t efs -o tls {}:/ /opt/sonatype-work'.format(
                 args.efs_id))
-            sudo('bash -c "echo \'{}:/ /opt/sonatype-work efs tls,_netdev 0 0\' >> '
+            conn.sudo('bash -c "echo \'{}:/ /opt/sonatype-work efs tls,_netdev 0 0\' >> '
                  '/etc/fstab"'.format(args.efs_id))
-            put('files/mount-efs-sequentially.service', '/tmp/mount-efs-sequentially.service')
-            sudo('cp /tmp/mount-efs-sequentially.service /etc/systemd/system/')
-            sudo('systemctl daemon-reload')
-            sudo('systemctl enable mount-efs-sequentially.service')
-            sudo('touch /home/{}/.ensure_dir/efs_mounted'.format(configuration['conf_os_user']))
+            conn.put('files/mount-efs-sequentially.service', '/tmp/mount-efs-sequentially.service')
+            conn.sudo('cp /tmp/mount-efs-sequentially.service /etc/systemd/system/')
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo('systemctl enable mount-efs-sequentially.service')
+            conn.sudo('touch /home/{}/.ensure_dir/efs_mounted'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to mount additional volume: ', str(err))
@@ -1106,20 +1106,20 @@ def mount_efs():
 def configure_ssl():
     try:
         if not exists('/home/{}/.ensure_dir/ssl_ensured'.format(configuration['conf_os_user'])):
-            hostname = sudo('hostname')
-            private_ip = sudo('curl http://169.254.169.254/latest/meta-data/local-ipv4')
+            hostname = conn.sudo('hostname')
+            private_ip = conn.sudo('curl http://169.254.169.254/latest/meta-data/local-ipv4')
             subject_alt_name = 'subjectAltName = IP:{}'.format(private_ip)
             if args.network_type == 'public':
-                public_ip = sudo('curl http://169.254.169.254/latest/meta-data/public-ipv4')
+                public_ip = conn.sudo('curl http://169.254.169.254/latest/meta-data/public-ipv4')
                 subject_alt_name += ',IP:{}'.format(public_ip)
-            sudo('cp /etc/ssl/openssl.cnf /tmp/openssl.cnf')
-            sudo('echo "[ subject_alt_name ]" >> /tmp/openssl.cnf')
-            sudo('echo "{}" >> /tmp/openssl.cnf'.format(subject_alt_name))
-            sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/repository.key '
+            conn.sudo('cp /etc/ssl/openssl.cnf /tmp/openssl.cnf')
+            conn.sudo('echo "[ subject_alt_name ]" >> /tmp/openssl.cnf')
+            conn.sudo('echo "{}" >> /tmp/openssl.cnf'.format(subject_alt_name))
+            conn.sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/repository.key '
                  '-out /etc/ssl/certs/repository.crt -subj "/C=US/ST=US/L=US/O=datalab/CN={}" -config '
                  '/tmp/openssl.cnf -extensions subject_alt_name'.format(hostname))
-            sudo('openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048')
-            sudo('touch /home/{}/.ensure_dir/ssl_ensured'.format(configuration['conf_os_user']))
+            conn.sudo('openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048')
+            conn.sudo('touch /home/{}/.ensure_dir/ssl_ensured'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to mount additional volume: ', str(err))
@@ -1133,11 +1133,11 @@ def set_hostname():
                 hostname = '{0}.{1}'.format(args.subdomain, args.hosted_zone_name)
             else:
                 if args.network_type == 'public':
-                    hostname = sudo('curl http://169.254.169.254/latest/meta-data/public-hostname')
+                    hostname = conn.sudo('curl http://169.254.169.254/latest/meta-data/public-hostname')
                 else:
-                    hostname = sudo('curl http://169.254.169.254/latest/meta-data/hostname')
-            sudo('hostnamectl set-hostname {0}'.format(hostname))
-            sudo('touch /home/{}/.ensure_dir/hostname_set'.format(configuration['conf_os_user']))
+                    hostname = conn.sudo('curl http://169.254.169.254/latest/meta-data/hostname')
+            conn.sudo('hostnamectl set-hostname {0}'.format(hostname))
+            conn.sudo('touch /home/{}/.ensure_dir/hostname_set'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to mount additional volume: ', str(err))
@@ -1147,11 +1147,11 @@ def set_hostname():
 def create_keystore():
     try:
         if not exists('/home/{}/.ensure_dir/keystore_created'.format(configuration['conf_os_user'])):
-            sudo('openssl pkcs12 -export -in /etc/ssl/certs/repository.crt -inkey /etc/ssl/certs/repository.key '
+            conn.sudo('openssl pkcs12 -export -in /etc/ssl/certs/repository.crt -inkey /etc/ssl/certs/repository.key '
                  '-out wildcard.p12 -passout pass:{}'.format(keystore_pass))
-            sudo('keytool -importkeystore  -deststorepass {0} -destkeypass {0} -srckeystore wildcard.p12 -srcstoretype '
+            conn.sudo('keytool -importkeystore  -deststorepass {0} -destkeypass {0} -srckeystore wildcard.p12 -srcstoretype '
                  'PKCS12 -srcstorepass {0} -destkeystore /opt/nexus/etc/ssl/keystore.jks'.format(keystore_pass))
-            sudo('touch /home/{}/.ensure_dir/keystore_created'.format(configuration['conf_os_user']))
+            conn.sudo('touch /home/{}/.ensure_dir/keystore_created'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to create keystore: ', str(err))
@@ -1233,16 +1233,16 @@ def download_packages():
             for package in packages_urls:
                 package_name = package.split('/')[-1]
                 packages_list.append({'url': package, 'name': package_name})
-            run('mkdir packages')
+            conn.run('mkdir packages')
             with cd('packages'):
                 for package in packages_list:
-                    run('wget {0}'.format(package['url']))
-                    run('curl -v -u admin:{2} -F "raw.directory=/" -F '
+                    conn.run('wget {0}'.format(package['url']))
+                    conn.run('curl -v -u admin:{2} -F "raw.directory=/" -F '
                         '"raw.asset1=@/home/{0}/packages/{1}" '
                         '-F "raw.asset1.filename={1}"  '
                         '"http://localhost:8081/service/rest/v1/components?repository=packages"'.format(
                          configuration['conf_os_user'], package['name'], args.nexus_admin_password))
-            sudo('touch /home/{}/.ensure_dir/packages_downloaded'.format(configuration['conf_os_user']))
+            conn.sudo('touch /home/{}/.ensure_dir/packages_downloaded'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to download packages: ', str(err))
@@ -1252,16 +1252,16 @@ def download_packages():
 def install_docker():
     try:
         if not exists('/home/{}/.ensure_dir/docker_installed'.format(configuration['conf_os_user'])):
-            sudo('curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -')
-            sudo('add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) '
+            conn.sudo('curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -')
+            conn.sudo('add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) '
                  'stable"')
-            sudo('apt-get update')
-            sudo('apt-cache policy docker-ce')
-            sudo('apt-get install -y docker-ce=5:{}~3-0~ubuntu-focal'.format(configuration['ssn_docker_version']))
-            sudo('usermod -a -G docker ' + configuration['conf_os_user'])
-            sudo('update-rc.d docker defaults')
-            sudo('update-rc.d docker enable')
-            sudo('touch /home/{}/.ensure_dir/docker_installed'.format(configuration['conf_os_user']))
+            conn.sudo('apt-get update')
+            conn.sudo('apt-cache policy docker-ce')
+            conn.sudo('apt-get install -y docker-ce=5:{}~3-0~ubuntu-focal'.format(configuration['ssn_docker_version']))
+            conn.sudo('usermod -a -G docker ' + configuration['conf_os_user'])
+            conn.sudo('update-rc.d docker defaults')
+            conn.sudo('update-rc.d docker enable')
+            conn.sudo('touch /home/{}/.ensure_dir/docker_installed'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to install docker: ', str(err))
@@ -1271,14 +1271,14 @@ def install_docker():
 def prepare_images():
     try:
         if not exists('/home/{}/.ensure_dir/images_prepared'.format(configuration['conf_os_user'])):
-            put('files/Dockerfile', '/tmp/Dockerfile')
+            conn.put('files/Dockerfile', '/tmp/Dockerfile')
             with cd('/tmp/'):
-                sudo('docker build --file Dockerfile -t pre-base .')
-            sudo('docker login -u {0} -p {1} localhost:8083'.format(args.nexus_service_user_name,
+                conn.sudo('docker build --file Dockerfile -t pre-base .')
+            conn.sudo('docker login -u {0} -p {1} localhost:8083'.format(args.nexus_service_user_name,
                                                                     args.nexus_service_user_password))
-            sudo('docker tag pre-base localhost:8083/datalab-pre-base')
-            sudo('docker push localhost:8083/datalab-pre-base')
-            sudo('touch /home/{}/.ensure_dir/images_prepared'.format(configuration['conf_os_user']))
+            conn.sudo('docker tag pre-base localhost:8083/datalab-pre-base')
+            conn.sudo('docker push localhost:8083/datalab-pre-base')
+            conn.sudo('touch /home/{}/.ensure_dir/images_prepared'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to download packages: ', str(err))
@@ -1288,19 +1288,19 @@ def prepare_images():
 def install_squid():
     try:
         if not exists('/home/{}/.ensure_dir/squid_installed'.format(configuration['conf_os_user'])):
-            sudo('apt-get -y install squid')
-            put('templates/squid.conf', '/etc/squid/', use_sudo=True)
+            conn.sudo('apt-get -y install squid')
+            conn.put('templates/squid.conf', '/etc/squid/', use_sudo=True)
             replace_string = ''
             for cidr in get_vpc_cidr_by_id(args.vpc_id):
                 replace_string += 'acl AWS_VPC_CIDR src {}\\n'.format(cidr)
-            sudo('sed -i "s|VPC_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
+            conn.sudo('sed -i "s|VPC_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
             replace_string = ''
             for cidr in args.allowed_ip_cidr.split(','):
                 replace_string += 'acl AllowedCIDRS src {}\\n'.format(cidr)
-            sudo('sed -i "s|ALLOWED_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
-            sudo('systemctl enable squid')
-            sudo('systemctl restart squid')
-            sudo('touch /home/{}/.ensure_dir/squid_installed'.format(configuration['conf_os_user']))
+            conn.sudo('sed -i "s|ALLOWED_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
+            conn.sudo('systemctl enable squid')
+            conn.sudo('systemctl restart squid')
+            conn.sudo('touch /home/{}/.ensure_dir/squid_installed'.format(configuration['conf_os_user']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to download packages: ', str(err))
diff --git a/infrastructure-provisioning/src/base/scripts/create_ssh_user.py b/infrastructure-provisioning/src/base/scripts/create_ssh_user.py
index bede833..ef51eb7 100644
--- a/infrastructure-provisioning/src/base/scripts/create_ssh_user.py
+++ b/infrastructure-provisioning/src/base/scripts/create_ssh_user.py
@@ -38,16 +38,16 @@ args = parser.parse_args()
 
 def ensure_ssh_user(initial_user, os_user, sudo_group):
     if not exists('/home/{}/.ssh_user_ensured'.format(initial_user)):
-        sudo('useradd -m -G {1} -s /bin/bash {0}'.format(os_user, sudo_group))
-        sudo('echo "{} ALL = NOPASSWD:ALL" >> /etc/sudoers'.format(os_user))
-        sudo('mkdir /home/{}/.ssh'.format(os_user))
-        sudo('chown -R {0}:{0} /home/{1}/.ssh/'.format(initial_user, os_user))
-        sudo('cat /home/{0}/.ssh/authorized_keys > /home/{1}/.ssh/authorized_keys'.format(initial_user, os_user))
-        sudo('chown -R {0}:{0} /home/{0}/.ssh/'.format(os_user))
-        sudo('chmod 700 /home/{0}/.ssh'.format(os_user))
-        sudo('chmod 600 /home/{0}/.ssh/authorized_keys'.format(os_user))
-        sudo('mkdir /home/{}/.ensure_dir'.format(os_user))
-        sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
+        conn.sudo('useradd -m -G {1} -s /bin/bash {0}'.format(os_user, sudo_group))
+        conn.sudo('echo "{} ALL = NOPASSWD:ALL" >> /etc/sudoers'.format(os_user))
+        conn.sudo('mkdir /home/{}/.ssh'.format(os_user))
+        conn.sudo('chown -R {0}:{0} /home/{1}/.ssh/'.format(initial_user, os_user))
+        conn.sudo('cat /home/{0}/.ssh/authorized_keys > /home/{1}/.ssh/authorized_keys'.format(initial_user, os_user))
+        conn.sudo('chown -R {0}:{0} /home/{0}/.ssh/'.format(os_user))
+        conn.sudo('chmod 700 /home/{0}/.ssh'.format(os_user))
+        conn.sudo('chmod 600 /home/{0}/.ssh/authorized_keys'.format(os_user))
+        conn.sudo('mkdir /home/{}/.ensure_dir'.format(os_user))
+        conn.sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
 
 
 if __name__ == "__main__":
diff --git a/infrastructure-provisioning/src/base/scripts/install_prerequisites.py b/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
index 44b8a8c..1725aae 100644
--- a/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
+++ b/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
@@ -43,12 +43,12 @@ args = parser.parse_args()
 
 def create_china_pip_conf_file():
     if not exists('/home/{}/pip_china_ensured'.format(args.user)):
-        sudo('touch /etc/pip.conf')
-        sudo('echo "[global]" >> /etc/pip.conf')
-        sudo('echo "timeout = 600" >> /etc/pip.conf')
-        sudo('echo "index-url = https://{}/simple/" >> /etc/pip.conf'.format(os.environ['conf_pypi_mirror']))
-        sudo('echo "trusted-host = {}" >> /etc/pip.conf'.format(os.environ['conf_pypi_mirror']))
-        sudo('touch /home/{}/pip_china_ensured'.format(args.user))
+        conn.sudo('touch /etc/pip.conf')
+        conn.sudo('echo "[global]" >> /etc/pip.conf')
+        conn.sudo('echo "timeout = 600" >> /etc/pip.conf')
+        conn.sudo('echo "index-url = https://{}/simple/" >> /etc/pip.conf'.format(os.environ['conf_pypi_mirror']))
+        conn.sudo('echo "trusted-host = {}" >> /etc/pip.conf'.format(os.environ['conf_pypi_mirror']))
+        conn.sudo('touch /home/{}/pip_china_ensured'.format(args.user))
 
 
 if __name__ == "__main__":
diff --git a/infrastructure-provisioning/src/base/scripts/install_user_key.py b/infrastructure-provisioning/src/base/scripts/install_user_key.py
index 013a5e1..d9cdd4c 100644
--- a/infrastructure-provisioning/src/base/scripts/install_user_key.py
+++ b/infrastructure-provisioning/src/base/scripts/install_user_key.py
@@ -38,8 +38,8 @@ args = parser.parse_args()
 def copy_key(config):
     admin_key_pub = local('ssh-keygen -y -f {}'.format(args.keyfile),
                           capture=True)
-    sudo('rm -f /home/{}/.ssh/authorized_keys'.format(args.user))
-    sudo('echo "{0}" >> /home/{1}/.ssh/authorized_keys'.format(admin_key_pub, args.user))
+    conn.sudo('rm -f /home/{}/.ssh/authorized_keys'.format(args.user))
+    conn.sudo('echo "{0}" >> /home/{1}/.ssh/authorized_keys'.format(admin_key_pub, args.user))
     try:
         user_key = '{}{}.pub'.format(
             config.get('user_keydir'),
@@ -49,7 +49,7 @@ def copy_key(config):
             key = open('{0}'.format(user_key)).read()
         else:
             key = config.get('user_key')
-        sudo('echo "{0}" >> /home/{1}/.ssh/authorized_keys'.format(key, args.user))
+        conn.sudo('echo "{0}" >> /home/{1}/.ssh/authorized_keys'.format(key, args.user))
     except:
         print('No user key')
 
diff --git a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
index daaa2a1..3d5b21f 100644
--- a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
+++ b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
@@ -82,33 +82,33 @@ if os.environ['application'] == 'deeplearning':
 def start_spark(os_user, master_ip, node):
     if not exists('/home/{0}/.ensure_dir/start_spark-{1}_ensured'.format(os_user, node)):
         if not exists('/opt/spark/conf/spark-env.sh'):
-            sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
-        sudo('''echo "SPARK_MASTER_HOST='{}'" >> /opt/spark/conf/spark-env.sh'''.format(master_ip))
+            conn.sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
+        conn.sudo('''echo "SPARK_MASTER_HOST='{}'" >> /opt/spark/conf/spark-env.sh'''.format(master_ip))
         if os.environ['application'] in ('tensor', 'tensor-rstudio'):
-            sudo('''echo "LD_LIBRARY_PATH=/opt/cudnn/lib64:/usr/local/cuda/lib64" >> /opt/spark/conf/spark-env.sh''')
+            conn.sudo('''echo "LD_LIBRARY_PATH=/opt/cudnn/lib64:/usr/local/cuda/lib64" >> /opt/spark/conf/spark-env.sh''')
         if os.environ['application'] == 'deeplearning':
-            sudo('''echo "LD_LIBRARY_PATH=/opt/cudnn/lib64:/usr/local/cuda/lib64:/usr/lib64/openmpi/lib" >> /opt/spark/conf/spark-env.sh''')
+            conn.sudo('''echo "LD_LIBRARY_PATH=/opt/cudnn/lib64:/usr/local/cuda/lib64:/usr/lib64/openmpi/lib" >> /opt/spark/conf/spark-env.sh''')
         if node == 'master':
             with cd('/opt/spark/sbin/'):
-                sudo("sed -i '/start-slaves.sh/d' start-all.sh")
-                sudo('''echo '"${}/sbin"/start-slave.sh spark://{}:7077' >> start-all.sh'''.format('{SPARK_HOME}', master_ip))
-            put('~/templates/spark-master.service', '/tmp/spark-master.service')
-            sudo('mv /tmp/spark-master.service /etc/systemd/system/spark-master.service')
-            sudo('systemctl daemon-reload')
-            sudo('systemctl enable spark-master.service')
-            sudo('systemctl start spark-master.service')
+                conn.sudo("sed -i '/start-slaves.sh/d' start-all.sh")
+                conn.sudo('''echo '"${}/sbin"/start-slave.sh spark://{}:7077' >> start-all.sh'''.format('{SPARK_HOME}', master_ip))
+            conn.put('~/templates/spark-master.service', '/tmp/spark-master.service')
+            conn.sudo('mv /tmp/spark-master.service /etc/systemd/system/spark-master.service')
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo('systemctl enable spark-master.service')
+            conn.sudo('systemctl start spark-master.service')
         if node == 'slave':
             with open('/root/templates/spark-slave.service', 'r') as f:
                 text = f.read()
             text = text.replace('MASTER', 'spark://{}:7077'.format(master_ip))
             with open('/root/templates/spark-slave.service', 'w') as f:
                 f.write(text)
-            put('~/templates/spark-slave.service', '/tmp/spark-slave.service')
-            sudo('mv /tmp/spark-slave.service /etc/systemd/system/spark-slave.service')
-            sudo('systemctl daemon-reload')
-            sudo('systemctl enable spark-slave.service')
-            sudo('systemctl start spark-slave.service')
-        sudo('touch /home/{0}/.ensure_dir/start_spark-{1}_ensured'.format(os_user, node))
+            conn.put('~/templates/spark-slave.service', '/tmp/spark-slave.service')
+            conn.sudo('mv /tmp/spark-slave.service /etc/systemd/system/spark-slave.service')
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo('systemctl enable spark-slave.service')
+            conn.sudo('systemctl start spark-slave.service')
+        conn.sudo('touch /home/{0}/.ensure_dir/start_spark-{1}_ensured'.format(os_user, node))
 
 ##############
 # Run script #
@@ -123,7 +123,7 @@ if __name__ == "__main__":
     print("Prepare .ensure directory")
     try:
         if not exists('/home/' + args.os_user + '/.ensure_dir'):
-            sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
+            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
 
@@ -200,22 +200,22 @@ if __name__ == "__main__":
     # INSTALL LIVY
     if not exists('/home/{0}/.ensure_dir/livy_ensured'.format(args.os_user)):
         livy_version = '0.7.0'
-        sudo(
+        conn.sudo(
             'wget -nv --timeout=30 --tries=5 --retry-connrefused https://archive.apache.org/dist/incubator/livy/{0}-incubating/apache-livy-{0}-incubating-bin.zip -P /tmp/'.format(
                 livy_version))
-        sudo('unzip -q /tmp/apache-livy-{}-incubating-bin.zip -d /tmp/'.format(livy_version))
-        sudo('mv /tmp/apache-livy-{}-incubating-bin /opt/livy'.format(livy_version))
-        sudo('mkdir /var/log/livy')
-        put('~/templates/livy-env.sh', '/tmp/livy-env.sh')
-        sudo('mv /tmp/livy-env.sh /opt/livy/conf/livy-env.sh')
-        sudo('chown -R -L {0}:{0} /opt/livy/'.format(args.os_user))
-        sudo('chown -R {0}:{0} /var/log/livy'.format(args.os_user))
-        put('~/templates/livy.service', '/tmp/livy.service')
-        sudo("sed -i 's|OS_USER|{}|' /tmp/livy.service".format(args.os_user))
-        sudo('mv /tmp/livy.service /etc/systemd/system/livy.service')
-        sudo('systemctl daemon-reload')
-        sudo('systemctl enable livy.service')
-        sudo('systemctl start livy.service')
-        sudo('touch /home/{0}/.ensure_dir/livy_ensured'.format(args.os_user))
+        conn.sudo('unzip -q /tmp/apache-livy-{}-incubating-bin.zip -d /tmp/'.format(livy_version))
+        conn.sudo('mv /tmp/apache-livy-{}-incubating-bin /opt/livy'.format(livy_version))
+        conn.sudo('mkdir /var/log/livy')
+        conn.put('~/templates/livy-env.sh', '/tmp/livy-env.sh')
+        conn.sudo('mv /tmp/livy-env.sh /opt/livy/conf/livy-env.sh')
+        conn.sudo('chown -R -L {0}:{0} /opt/livy/'.format(args.os_user))
+        conn.sudo('chown -R {0}:{0} /var/log/livy'.format(args.os_user))
+        conn.put('~/templates/livy.service', '/tmp/livy.service')
+        conn.sudo("sed -i 's|OS_USER|{}|' /tmp/livy.service".format(args.os_user))
+        conn.sudo('mv /tmp/livy.service /etc/systemd/system/livy.service')
+        conn.sudo('systemctl daemon-reload')
+        conn.sudo('systemctl enable livy.service')
+        conn.sudo('systemctl start livy.service')
+        conn.sudo('touch /home/{0}/.ensure_dir/livy_ensured'.format(args.os_user))
 
     datalab.fab.close_connection()
\ No newline at end of file
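
Several of the hunks above repeat the same upload sequence: conn.put() runs as
the unprivileged SSH user, so files destined for root-owned paths are staged
through /tmp and then moved, enabled and started with conn.sudo(). The
recurring pattern as a standalone sketch (the helper name is illustrative, not
something defined in this patch):

    def install_systemd_unit(conn, local_path, unit_name):
        # Stage the unit file where the SSH user can write, then promote it.
        conn.put(local_path, '/tmp/{}'.format(unit_name))
        conn.sudo('mv /tmp/{0} /etc/systemd/system/{0}'.format(unit_name))
        conn.sudo('systemctl daemon-reload')
        conn.sudo('systemctl enable {}'.format(unit_name))
        conn.sudo('systemctl start {}'.format(unit_name))
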
diff --git a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
index ad6ac5a..d2e7b15 100644
--- a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
+++ b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
@@ -86,9 +86,9 @@ def install_itorch(os_user):
             run('luarocks install uuid')
             run('luarocks install lzmq')
             run('luarocks make')
-        sudo('cp -rf /home/{0}/.ipython/kernels/itorch/ /home/{0}/.local/share/jupyter/kernels/'.format(os_user))
-        sudo('chown -R {0}:{0} /home/{0}/.local/share/jupyter/'.format(os_user))
-        sudo('touch /home/{}/.ensure_dir/itorch_ensured'.format(os_user))
+        conn.sudo('cp -rf /home/{0}/.ipython/kernels/itorch/ /home/{0}/.local/share/jupyter/kernels/'.format(os_user))
+        conn.sudo('chown -R {0}:{0} /home/{0}/.local/share/jupyter/'.format(os_user))
+        conn.sudo('touch /home/{}/.ensure_dir/itorch_ensured'.format(os_user))
 
 
 if __name__ == "__main__":
@@ -99,8 +99,8 @@ if __name__ == "__main__":
     print("Prepare .ensure directory")
     try:
         if not exists('/home/' + args.os_user + '/.ensure_dir'):
-            sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
-            sudo('touch /home/' + args.os_user + '/.ensure_dir/deep_learning')
+            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
+            conn.sudo('touch /home/' + args.os_user + '/.ensure_dir/deep_learning')
     except:
         sys.exit(1)
     print("Mount additional volume")
diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index 5f7e8ad..0f4d8df 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -1259,22 +1259,22 @@ def remove_kernels(emr_name, tag_name, nb_tag_value, ssh_user, key_path, emr_ver
                 env.user = "{}".format(ssh_user)
                 env.key_filename = "{}".format(key_path)
                 env.host_string = env.user + "@" + env.hosts
-                sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(ssh_user, emr_name))
+                conn.sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(ssh_user, emr_name))
                 if exists('/home/{}/.ensure_dir/dataengine-service_{}_interpreter_ensured'.format(ssh_user, emr_name)):
                     if os.environ['notebook_multiple_clusters'] == 'true':
                         try:
-                            livy_port = sudo("cat /opt/" + emr_version + "/" + emr_name +
+                            livy_port = conn.sudo("cat /opt/" + emr_version + "/" + emr_name +
                                              "/livy/conf/livy.conf | grep livy.server.port | tail -n 1 | "
                                              "awk '{printf $3}'")
-                            process_number = sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
+                            process_number = conn.sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
                                                   "' | awk '{print $7}' | sed 's|/.*||g'")
-                            sudo('kill -9 ' + process_number)
-                            sudo('systemctl disable livy-server-' + livy_port)
+                            conn.sudo('kill -9 ' + process_number)
+                            conn.sudo('systemctl disable livy-server-' + livy_port)
                         except:
                             print("Wasn't able to find Livy server for this EMR!")
-                    sudo('sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/spark/\" '
+                    conn.sudo('sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/spark/\" '
                          '/opt/zeppelin/conf/zeppelin-env.sh')
-                    sudo("rm -rf /home/{}/.ensure_dir/dataengine-service_interpreter_ensure".format(ssh_user))
+                    conn.sudo("rm -rf /home/{}/.ensure_dir/dataengine-service_interpreter_ensure".format(ssh_user))
                     zeppelin_url = 'http://' + private + ':8080/api/interpreter/setting/'
                     opener = urllib2.build_opener(urllib2.ProxyHandler({}))
                     req = opener.open(urllib2.Request(zeppelin_url))
@@ -1289,23 +1289,23 @@ def remove_kernels(emr_name, tag_name, nb_tag_value, ssh_user, key_path, emr_ver
                             request.get_method = lambda: 'DELETE'
                             url = opener.open(request)
                             print(url.read())
-                    sudo('chown ' + ssh_user + ':' + ssh_user + ' -R /opt/zeppelin/')
-                    sudo('systemctl daemon-reload')
-                    sudo("service zeppelin-notebook stop")
-                    sudo("service zeppelin-notebook start")
+                    conn.sudo('chown ' + ssh_user + ':' + ssh_user + ' -R /opt/zeppelin/')
+                    conn.sudo('systemctl daemon-reload')
+                    conn.sudo("service zeppelin-notebook stop")
+                    conn.sudo("service zeppelin-notebook start")
                     zeppelin_restarted = False
                     while not zeppelin_restarted:
-                        sudo('sleep 5')
-                        result = sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?')
+                        conn.sudo('sleep 5')
+                        result = conn.sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?')
                         result = result[:1]
                         if result == '1':
                             zeppelin_restarted = True
-                    sudo('sleep 5')
-                    sudo('rm -rf /home/{}/.ensure_dir/dataengine-service_{}_interpreter_ensured'.format(ssh_user,
+                    conn.sudo('sleep 5')
+                    conn.sudo('rm -rf /home/{}/.ensure_dir/dataengine-service_{}_interpreter_ensured'.format(ssh_user,
                                                                                                         emr_name))
                 if exists('/home/{}/.ensure_dir/rstudio_dataengine-service_ensured'.format(ssh_user)):
                     datalab.fab.remove_rstudio_dataengines_kernel(computational_name, ssh_user)
-                sudo('rm -rf  /opt/' + emr_version + '/' + emr_name + '/')
+                conn.sudo('rm -rf  /opt/' + emr_version + '/' + emr_name + '/')
                 print("Notebook's {} kernels were removed".format(env.hosts))
         else:
             print("There are no notebooks to clean kernels.")
@@ -1627,14 +1627,14 @@ def spark_defaults(args):
 def ensure_local_jars(os_user, jars_dir):
     if not exists('/home/{}/.ensure_dir/local_jars_ensured'.format(os_user)):
         try:
-            sudo('mkdir -p {0}'.format(jars_dir))
-            sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/{0}/hadoop-aws-{0}.jar -O \
+            conn.sudo('mkdir -p {0}'.format(jars_dir))
+            conn.sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/{0}/hadoop-aws-{0}.jar -O \
                     {1}hadoop-aws-{0}.jar'.format('2.7.4', jars_dir))
-            sudo('wget https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk/{0}/aws-java-sdk-{0}.jar -O \
+            conn.sudo('wget https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk/{0}/aws-java-sdk-{0}.jar -O \
                     {1}aws-java-sdk-{0}.jar'.format('1.7.4', jars_dir))
-            # sudo('wget https://maven.twttr.com/com/hadoop/gplcompression/hadoop-lzo/{0}/hadoop-lzo-{0}.jar -O \
+            # conn.sudo('wget https://maven.twttr.com/com/hadoop/gplcompression/hadoop-lzo/{0}/hadoop-lzo-{0}.jar -O \
             #         {1}hadoop-lzo-{0}.jar'.format('0.4.20', jars_dir))
-            sudo('touch /home/{}/.ensure_dir/local_jars_ensured'.format(os_user))
+            conn.sudo('touch /home/{}/.ensure_dir/local_jars_ensured'.format(os_user))
         except:
             sys.exit(1)
 
@@ -1645,39 +1645,39 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
         spark_jars_paths = None
         if exists('/opt/spark/conf/spark-defaults.conf'):
             try:
-                spark_jars_paths = sudo('cat /opt/spark/conf/spark-defaults.conf | grep -e "^spark.jars " ')
+                spark_jars_paths = conn.sudo('cat /opt/spark/conf/spark-defaults.conf | grep -e "^spark.jars " ')
             except:
                 spark_jars_paths = None
-        region = sudo('curl http://169.254.169.254/latest/meta-data/placement/availability-zone')[:-1]
+        region = conn.sudo('curl http://169.254.169.254/latest/meta-data/placement/availability-zone')[:-1]
         if region == 'us-east-1':
             endpoint_url = 'https://s3.amazonaws.com'
         elif region == 'cn-north-1':
             endpoint_url = "https://s3.{}.amazonaws.com.cn".format(region)
         else:
             endpoint_url = 'https://s3-' + region + '.amazonaws.com'
-        put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/notebook_spark-defaults_local.conf')
-        sudo('echo "spark.hadoop.fs.s3a.endpoint     {}" >> /tmp/notebook_spark-defaults_local.conf'.format(
+        conn.put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/notebook_spark-defaults_local.conf')
+        conn.sudo('echo "spark.hadoop.fs.s3a.endpoint     {}" >> /tmp/notebook_spark-defaults_local.conf'.format(
             endpoint_url))
-        sudo('echo "spark.hadoop.fs.s3a.server-side-encryption-algorithm   AES256" >> '
+        conn.sudo('echo "spark.hadoop.fs.s3a.server-side-encryption-algorithm   AES256" >> '
              '/tmp/notebook_spark-defaults_local.conf')
         if not exists('/opt/spark/conf/spark-env.sh'):
-            sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
-        java_home = run("update-alternatives --query java | grep -o --color=never \'/.*/java-8.*/jre\'").splitlines()[0]
-        sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
+            conn.sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
+        java_home = conn.run("update-alternatives --query java | grep -o --color=never \'/.*/java-8.*/jre\'").splitlines()[0]
+        conn.sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
         if os.environ['application'] == 'zeppelin':
-            sudo('echo \"spark.jars $(ls -1 ' + jars_dir + '* | tr \'\\n\' \',\')\" >> '
+            conn.sudo('echo \"spark.jars $(ls -1 ' + jars_dir + '* | tr \'\\n\' \',\')\" >> '
                                                            '/tmp/notebook_spark-defaults_local.conf')
-        sudo('\cp -f /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
+        conn.sudo('\cp -f /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
         if memory_type == 'driver':
             spark_memory = datalab.fab.get_spark_memory()
-            sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
-            sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
+            conn.sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
+            conn.sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                               spark_memory))
         if 'spark_configurations' in os.environ:
-            datalab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
+            datalab_header = conn.sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
             new_spark_defaults = list()
-            spark_defaults = sudo('cat /opt/spark/conf/spark-defaults.conf')
+            spark_defaults = conn.sudo('cat /opt/spark/conf/spark-defaults.conf')
             current_spark_properties = spark_defaults.split('\n')
             for param in current_spark_properties:
                 if param.split(' ')[0] != '#':
@@ -1690,13 +1690,13 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
                                     new_spark_defaults.append(property + ' ' + config['Properties'][property])
                     new_spark_defaults.append(param)
             new_spark_defaults = set(new_spark_defaults)
-            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(datalab_header))
+            conn.sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(datalab_header))
             for prop in new_spark_defaults:
                 prop = prop.rstrip()
-                sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
-            sudo('sed -i "/^\s*$/d" /opt/spark/conf/spark-defaults.conf')
+                conn.sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
+            conn.sudo('sed -i "/^\s*$/d" /opt/spark/conf/spark-defaults.conf')
             if spark_jars_paths:
-                sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(spark_jars_paths))
+                conn.sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(spark_jars_paths))
     except Exception as err:
         print('Error:', str(err))
         sys.exit(1)
@@ -1875,21 +1875,21 @@ def remove_dataengine_kernels(tag_name, notebook_name, os_user, key_path, cluste
         env.user = "{}".format(os_user)
         env.key_filename = "{}".format(key_path)
         env.host_string = env.user + "@" + env.hosts
-        sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(os_user, cluster_name))
+        conn.sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(os_user, cluster_name))
         if exists('/home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name)):
             if os.environ['notebook_multiple_clusters'] == 'true':
                 try:
-                    livy_port = sudo("cat /opt/" + cluster_name +
+                    livy_port = conn.sudo("cat /opt/" + cluster_name +
                                      "/livy/conf/livy.conf | grep livy.server.port | tail -n 1 | awk '{printf $3}'")
-                    process_number = sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
+                    process_number = conn.sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
                                           "' | awk '{print $7}' | sed 's|/.*||g'")
-                    sudo('kill -9 ' + process_number)
-                    sudo('systemctl disable livy-server-' + livy_port)
+                    conn.sudo('kill -9 ' + process_number)
+                    conn.sudo('systemctl disable livy-server-' + livy_port)
                 except:
                     print("Wasn't able to find Livy server for this EMR!")
-            sudo(
+            conn.sudo(
                 'sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh')
-            sudo("rm -rf /home/{}/.ensure_dir/dataengine_interpreter_ensure".format(os_user))
+            conn.sudo("rm -rf /home/{}/.ensure_dir/dataengine_interpreter_ensure".format(os_user))
             zeppelin_url = 'http://' + private + ':8080/api/interpreter/setting/'
             opener = urllib2.build_opener(urllib2.ProxyHandler({}))
             req = opener.open(urllib2.Request(zeppelin_url))
@@ -1904,22 +1904,22 @@ def remove_dataengine_kernels(tag_name, notebook_name, os_user, key_path, cluste
                     request.get_method = lambda: 'DELETE'
                     url = opener.open(request)
                     print(url.read())
-            sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/')
-            sudo('systemctl daemon-reload')
-            sudo("service zeppelin-notebook stop")
-            sudo("service zeppelin-notebook start")
+            conn.sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/')
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo("service zeppelin-notebook stop")
+            conn.sudo("service zeppelin-notebook start")
             zeppelin_restarted = False
             while not zeppelin_restarted:
-                sudo('sleep 5')
-                result = sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?')
+                conn.sudo('sleep 5')
+                result = conn.sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?')
                 result = result[:1]
                 if result == '1':
                     zeppelin_restarted = True
-            sudo('sleep 5')
-            sudo('rm -rf /home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name))
+            conn.sudo('sleep 5')
+            conn.sudo('rm -rf /home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name))
         if exists('/home/{}/.ensure_dir/rstudio_dataengine_ensured'.format(os_user)):
             datalab.fab.remove_rstudio_dataengines_kernel(os.environ['computational_name'], os_user)
-        sudo('rm -rf  /opt/' + cluster_name + '/')
+        conn.sudo('rm -rf  /opt/' + cluster_name + '/')
         print("Notebook's {} kernels were removed".format(env.hosts))
     except Exception as err:
         logging.info("Unable to remove kernels on Notebook: " + str(err) + "\n Traceback: " + traceback.print_exc(
@@ -1932,12 +1932,12 @@ def remove_dataengine_kernels(tag_name, notebook_name, os_user, key_path, cluste
 def prepare_disk(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/disk_ensured'):
         try:
-            disk_name = sudo("lsblk | grep disk | awk '{print $1}' | sort | tail -n 1")
-            sudo('''bash -c 'echo -e "o\nn\np\n1\n\n\nw" | fdisk /dev/{}' '''.format(disk_name))
-            sudo('mkfs.ext4 -F /dev/{}1'.format(disk_name))
-            sudo('mount /dev/{}1 /opt/'.format(disk_name))
-            sudo(''' bash -c "echo '/dev/{}1 /opt/ ext4 errors=remount-ro 0 1' >> /etc/fstab" '''.format(disk_name))
-            sudo('touch /home/' + os_user + '/.ensure_dir/disk_ensured')
+            disk_name = conn.sudo("lsblk | grep disk | awk '{print $1}' | sort | tail -n 1")
+            conn.sudo('''bash -c 'echo -e "o\nn\np\n1\n\n\nw" | fdisk /dev/{}' '''.format(disk_name))
+            conn.sudo('mkfs.ext4 -F /dev/{}1'.format(disk_name))
+            conn.sudo('mount /dev/{}1 /opt/'.format(disk_name))
+            conn.sudo(''' bash -c "echo '/dev/{}1 /opt/ ext4 errors=remount-ro 0 1' >> /etc/fstab" '''.format(disk_name))
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/disk_ensured')
         except:
             sys.exit(1)
 
@@ -1945,11 +1945,11 @@ def prepare_disk(os_user):
 def ensure_local_spark(os_user, spark_link, spark_version, hadoop_version, local_spark_path):
     if not exists('/home/' + os_user + '/.ensure_dir/local_spark_ensured'):
         try:
-            sudo('wget ' + spark_link + ' -O /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz')
-            sudo('tar -zxvf /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz -C /opt/')
-            sudo('mv /opt/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' + local_spark_path)
-            sudo('chown -R ' + os_user + ':' + os_user + ' ' + local_spark_path)
-            sudo('touch /home/' + os_user + '/.ensure_dir/local_spark_ensured')
+            conn.sudo('wget ' + spark_link + ' -O /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz')
+            conn.sudo('tar -zxvf /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz -C /opt/')
+            conn.sudo('mv /opt/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' + local_spark_path)
+            conn.sudo('chown -R ' + os_user + ':' + os_user + ' ' + local_spark_path)
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/local_spark_ensured')
         except Exception as err:
             print('Error:', str(err))
             sys.exit(1)
@@ -1976,7 +1976,7 @@ def find_des_jars(all_jars, des_path):
                     all_jars.remove(j)
         additional_jars = ['hadoop-aws', 'aws-java-sdk-s3', 'hadoop-lzo', 'aws-java-sdk-core']
         aws_filter = '\|'.join(additional_jars)
-        aws_jars = sudo('find {0} -name *.jar | grep "{1}"'.format(des_path, aws_filter)).split('\r\n')
+        aws_jars = conn.sudo('find {0} -name *.jar | grep "{1}"'.format(des_path, aws_filter)).split('\r\n')
         all_jars.extend(aws_jars)
         return all_jars
     except Exception as err:
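
Worth noting for the actions_lib conversions: in Fabric 2, run() and sudo()
return a Result object rather than a plain string, so call sites above that
slice or split the return value (livy_port, disk_name, the jar lookup in
find_des_jars) generally need to read .stdout first. A sketch of that
adjustment, which this commit does not yet apply:

    result = conn.sudo("lsblk | grep disk | awk '{print $1}' | sort | tail -n 1")
    disk_name = result.stdout.strip()
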
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index 2c51b51..ecaba1a 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -973,21 +973,21 @@ class AzureActions:
             env.user = "{}".format(os_user)
             env.key_filename = "{}".format(key_path)
             env.host_string = env.user + "@" + env.hosts
-            sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(os_user, cluster_name))
+            conn.sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(os_user, cluster_name))
             if exists('/home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name)):
                 if os.environ['notebook_multiple_clusters'] == 'true':
                     try:
-                        livy_port = sudo("cat /opt/" + cluster_name +
+                        livy_port = conn.sudo("cat /opt/" + cluster_name +
                                          "/livy/conf/livy.conf | grep livy.server.port | tail -n 1 | awk '{printf $3}'")
-                        process_number = sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
+                        process_number = conn.sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
                                               "' | awk '{print $7}' | sed 's|/.*||g'")
-                        sudo('kill -9 ' + process_number)
-                        sudo('systemctl disable livy-server-' + livy_port)
+                        conn.sudo('kill -9 ' + process_number)
+                        conn.sudo('systemctl disable livy-server-' + livy_port)
                     except:
                         print("Wasn't able to find Livy server for this dataengine!")
-                sudo(
+                conn.sudo(
                     'sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh')
-                sudo("rm -rf /home/{}/.ensure_dir/dataengine_interpreter_ensure".format(os_user))
+                conn.sudo("rm -rf /home/{}/.ensure_dir/dataengine_interpreter_ensure".format(os_user))
                 zeppelin_url = 'http://' + private + ':8080/api/interpreter/setting/'
                 opener = urllib2.build_opener(urllib2.ProxyHandler({}))
                 req = opener.open(urllib2.Request(zeppelin_url))
@@ -1002,22 +1002,22 @@ class AzureActions:
                         request.get_method = lambda: 'DELETE'
                         url = opener.open(request)
                         print(url.read())
-                sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/')
-                sudo('systemctl daemon-reload')
-                sudo("service zeppelin-notebook stop")
-                sudo("service zeppelin-notebook start")
+                conn.sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/')
+                conn.sudo('systemctl daemon-reload')
+                conn.sudo("service zeppelin-notebook stop")
+                conn.sudo("service zeppelin-notebook start")
                 zeppelin_restarted = False
                 while not zeppelin_restarted:
-                    sudo('sleep 5')
-                    result = sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?')
+                    conn.sudo('sleep 5')
+                    result = conn.sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?')
                     result = result[:1]
                     if result == '1':
                         zeppelin_restarted = True
-                sudo('sleep 5')
-                sudo('rm -rf /home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name))
+                conn.sudo('sleep 5')
+                conn.sudo('rm -rf /home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name))
             if exists('/home/{}/.ensure_dir/rstudio_dataengine_ensured'.format(os_user)):
                 datalab.fab.remove_rstudio_dataengines_kernel(os.environ['computational_name'], os_user)
-            sudo('rm -rf  /opt/' + cluster_name + '/')
+            conn.sudo('rm -rf  /opt/' + cluster_name + '/')
             print("Notebook's {} kernels were removed".format(env.hosts))
         except Exception as err:
             logging.info("Unable to remove kernels on Notebook: " + str(err) + "\n Traceback: " + traceback.print_exc(
@@ -1063,27 +1063,27 @@ class AzureActions:
 def ensure_local_jars(os_user, jars_dir):
     if not exists('/home/{}/.ensure_dir/local_jars_ensured'.format(os_user)):
         try:
-            hadoop_version = sudo("ls /opt/spark/jars/hadoop-common* | sed -n 's/.*\([0-9]\.[0-9]\.[0-9]\).*/\\1/p'")
+            hadoop_version = conn.sudo("ls /opt/spark/jars/hadoop-common* | sed -n 's/.*\([0-9]\.[0-9]\.[0-9]\).*/\\1/p'")
             print("Downloading local jars for Azure")
-            sudo('mkdir -p {}'.format(jars_dir))
+            conn.sudo('mkdir -p {}'.format(jars_dir))
             if os.environ['azure_datalake_enable'] == 'false':
-                sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure/{0}/hadoop-azure-{0}.jar -O \
+                conn.sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure/{0}/hadoop-azure-{0}.jar -O \
                                  {1}hadoop-azure-{0}.jar'.format(hadoop_version, jars_dir))
-                sudo('wget https://repo1.maven.org/maven2/com/microsoft/azure/azure-storage/{0}/azure-storage-{0}.jar \
+                conn.sudo('wget https://repo1.maven.org/maven2/com/microsoft/azure/azure-storage/{0}/azure-storage-{0}.jar \
                     -O {1}azure-storage-{0}.jar'.format('2.2.0', jars_dir))
             else:
-                sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure/{0}/hadoop-azure-{0}.jar -O \
+                conn.sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure/{0}/hadoop-azure-{0}.jar -O \
                                  {1}hadoop-azure-{0}.jar'.format('3.0.0', jars_dir))
-                sudo('wget https://repo1.maven.org/maven2/com/microsoft/azure/azure-storage/{0}/azure-storage-{0}.jar \
+                conn.sudo('wget https://repo1.maven.org/maven2/com/microsoft/azure/azure-storage/{0}/azure-storage-{0}.jar \
                                     -O {1}azure-storage-{0}.jar'.format('6.1.0', jars_dir))
-                sudo('wget https://repo1.maven.org/maven2/com/microsoft/azure/azure-data-lake-store-sdk/{0}/azure-data-lake-store-sdk-{0}.jar \
+                conn.sudo('wget https://repo1.maven.org/maven2/com/microsoft/azure/azure-data-lake-store-sdk/{0}/azure-data-lake-store-sdk-{0}.jar \
                     -O {1}azure-data-lake-store-sdk-{0}.jar'.format('2.2.3', jars_dir))
-                sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure-datalake/{0}/hadoop-azure-datalake-{0}.jar \
+                conn.sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure-datalake/{0}/hadoop-azure-datalake-{0}.jar \
                     -O {1}hadoop-azure-datalake-{0}.jar'.format('3.0.0', jars_dir))
             if os.environ['application'] == 'tensor' or os.environ['application'] == 'deeplearning':
-                sudo('wget https://dl.bintray.com/spark-packages/maven/tapanalyticstoolkit/spark-tensorflow-connector/{0}/spark-tensorflow-connector-{0}.jar \
+                conn.sudo('wget https://dl.bintray.com/spark-packages/maven/tapanalyticstoolkit/spark-tensorflow-connector/{0}/spark-tensorflow-connector-{0}.jar \
                      -O {1}spark-tensorflow-connector-{0}.jar'.format('1.0.0-s_2.11', jars_dir))
-            sudo('touch /home/{}/.ensure_dir/local_jars_ensured'.format(os_user))
+            conn.sudo('touch /home/{}/.ensure_dir/local_jars_ensured'.format(os_user))
         except Exception as err:
             logging.info(
                 "Unable to download local jars: " + str(err) + "\n Traceback: " + traceback.print_exc(file=sys.stdout))
@@ -1099,7 +1099,7 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
         spark_jars_paths = None
         if exists('/opt/spark/conf/spark-defaults.conf'):
             try:
-                spark_jars_paths = sudo('cat /opt/spark/conf/spark-defaults.conf | grep -e "^spark.jars " ')
+                spark_jars_paths = conn.sudo('cat /opt/spark/conf/spark-defaults.conf | grep -e "^spark.jars " ')
             except:
                 spark_jars_paths = None
         user_storage_account_tag = "{}-{}-{}-bucket".format(os.environ['conf_service_base_name'],
@@ -1117,42 +1117,42 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
                 shared_storage_account_key = meta_lib.AzureMeta().list_storage_keys(
                     os.environ['azure_resource_group_name'], shared_storage_account_name)[0]
         if os.environ['azure_datalake_enable'] == 'false':
-            put(templates_dir + 'core-site-storage.xml', '/tmp/core-site.xml')
+            conn.put(templates_dir + 'core-site-storage.xml', '/tmp/core-site.xml')
         else:
-            put(templates_dir + 'core-site-datalake.xml', '/tmp/core-site.xml')
-        sudo('sed -i "s|USER_STORAGE_ACCOUNT|{}|g" /tmp/core-site.xml'.format(user_storage_account_name))
-        sudo('sed -i "s|SHARED_STORAGE_ACCOUNT|{}|g" /tmp/core-site.xml'.format(shared_storage_account_name))
-        sudo('sed -i "s|USER_ACCOUNT_KEY|{}|g" /tmp/core-site.xml'.format(user_storage_account_key))
-        sudo('sed -i "s|SHARED_ACCOUNT_KEY|{}|g" /tmp/core-site.xml'.format(shared_storage_account_key))
+            conn.put(templates_dir + 'core-site-datalake.xml', '/tmp/core-site.xml')
+        conn.sudo('sed -i "s|USER_STORAGE_ACCOUNT|{}|g" /tmp/core-site.xml'.format(user_storage_account_name))
+        conn.sudo('sed -i "s|SHARED_STORAGE_ACCOUNT|{}|g" /tmp/core-site.xml'.format(shared_storage_account_name))
+        conn.sudo('sed -i "s|USER_ACCOUNT_KEY|{}|g" /tmp/core-site.xml'.format(user_storage_account_key))
+        conn.sudo('sed -i "s|SHARED_ACCOUNT_KEY|{}|g" /tmp/core-site.xml'.format(shared_storage_account_key))
         if os.environ['azure_datalake_enable'] == 'true':
             client_id = os.environ['azure_application_id']
             refresh_token = os.environ['azure_user_refresh_token']
-            sudo('sed -i "s|CLIENT_ID|{}|g" /tmp/core-site.xml'.format(client_id))
-            sudo('sed -i "s|REFRESH_TOKEN|{}|g" /tmp/core-site.xml'.format(refresh_token))
+            conn.sudo('sed -i "s|CLIENT_ID|{}|g" /tmp/core-site.xml'.format(client_id))
+            conn.sudo('sed -i "s|REFRESH_TOKEN|{}|g" /tmp/core-site.xml'.format(refresh_token))
         if os.environ['azure_datalake_enable'] == 'false':
-            sudo('rm -f /opt/spark/conf/core-site.xml')
-            sudo('mv /tmp/core-site.xml /opt/spark/conf/core-site.xml')
+            conn.sudo('rm -f /opt/spark/conf/core-site.xml')
+            conn.sudo('mv /tmp/core-site.xml /opt/spark/conf/core-site.xml')
         else:
-            sudo('rm -f /opt/hadoop/etc/hadoop/core-site.xml')
-            sudo('mv /tmp/core-site.xml /opt/hadoop/etc/hadoop/core-site.xml')
-        put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/notebook_spark-defaults_local.conf')
-        sudo("jar_list=`find {} -name '*.jar' | tr '\\n' ','` ; echo \"spark.jars   $jar_list\" >> \
+            conn.sudo('rm -f /opt/hadoop/etc/hadoop/core-site.xml')
+            conn.sudo('mv /tmp/core-site.xml /opt/hadoop/etc/hadoop/core-site.xml')
+        conn.put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/notebook_spark-defaults_local.conf')
+        conn.sudo("jar_list=`find {} -name '*.jar' | tr '\\n' ','` ; echo \"spark.jars   $jar_list\" >> \
               /tmp/notebook_spark-defaults_local.conf".format(jars_dir))
-        sudo('\cp -f /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
+        conn.sudo('\cp -f /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
         if memory_type == 'driver':
             spark_memory = datalab.fab.get_spark_memory()
-            sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
-            sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
+            conn.sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
+            conn.sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                               spark_memory))
         if not exists('/opt/spark/conf/spark-env.sh'):
-            sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
-        java_home = run("update-alternatives --query java | grep -o --color=never \'/.*/java-8.*/jre\'").splitlines()[0]
-        sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
+            conn.sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
+        java_home = conn.run("update-alternatives --query java | grep -o --color=never \'/.*/java-8.*/jre\'").splitlines()[0]
+        conn.sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
         if 'spark_configurations' in os.environ:
-            datalab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
+            datalab_header = conn.sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
             new_spark_defaults = list()
-            spark_defaults = sudo('cat /opt/spark/conf/spark-defaults.conf')
+            spark_defaults = conn.sudo('cat /opt/spark/conf/spark-defaults.conf')
             current_spark_properties = spark_defaults.split('\n')
             for param in current_spark_properties:
                 if param.split(' ')[0] != '#':
@@ -1165,13 +1165,13 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
                                     new_spark_defaults.append(property + ' ' + config['Properties'][property])
                     new_spark_defaults.append(param)
             new_spark_defaults = set(new_spark_defaults)
-            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(datalab_header))
+            conn.sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(datalab_header))
             for prop in new_spark_defaults:
                 prop = prop.rstrip()
-                sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
-            sudo('sed -i "/^\s*$/d" /opt/spark/conf/spark-defaults.conf')
+                conn.sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
+            conn.sudo('sed -i "/^\s*$/d" /opt/spark/conf/spark-defaults.conf')
             if spark_jars_paths:
-                sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(spark_jars_paths))
+                conn.sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(spark_jars_paths))
     except Exception as err:
         print('Error:', str(err))
         sys.exit(1)
@@ -1222,9 +1222,9 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
 def remount_azure_disk(creds=False, os_user='', hostname='', keyfile=''):
     if creds:
         datalab.fab.init_datalab_connection(hostname, os_user, keyfile)
-    sudo('sed -i "/azure_resource-part1/ s|/mnt|/media|g" /etc/fstab')
-    sudo('grep "azure_resource-part1" /etc/fstab > /dev/null &&  umount -f /mnt/ || true')
-    sudo('mount -a')
+    conn.sudo('sed -i "/azure_resource-part1/ s|/mnt|/media|g" /etc/fstab')
+    conn.sudo('grep "azure_resource-part1" /etc/fstab > /dev/null &&  umount -f /mnt/ || true')
+    conn.sudo('mount -a')
     if creds:
         datalab.fab.close_connection()
 
@@ -1232,7 +1232,7 @@ def remount_azure_disk(creds=False, os_user='', hostname='', keyfile=''):
 def prepare_vm_for_image(creds=False, os_user='', hostname='', keyfile=''):
     if creds:
         datalab.fab.init_datalab_connection(hostname, os_user, keyfile)
-    sudo('waagent -deprovision -force')
+    conn.sudo('waagent -deprovision -force')
     if creds:
         datalab.fab.close_connection()
 
@@ -1243,33 +1243,33 @@ def prepare_disk(os_user):
             allow = False
             counter = 0
             remount_azure_disk()
-            disk_name = sudo("lsblk | grep disk | awk '{print $1}' | sort | tail -n 1")
+            disk_name = conn.sudo("lsblk | grep disk | awk '{print $1}' | sort | tail -n 1")
             with settings(warn_only=True):
-                sudo('umount -l /dev/{}1'.format(disk_name))
+                conn.sudo('umount -l /dev/{}1'.format(disk_name))
             while not allow:
                 if counter > 4:
                     print("Unable to prepare disk")
                     sys.exit(1)
                 else:
-                    sudo('''bash -c 'echo -e "o\nn\np\n1\n\n\nw" | fdisk /dev/{}' 2>&1 | tee /tmp/tee.tmp '''.format(
+                    conn.sudo('''bash -c 'echo -e "o\nn\np\n1\n\n\nw" | fdisk /dev/{}' 2>&1 | tee /tmp/tee.tmp '''.format(
                         disk_name), warn_only=True)
-                    out = sudo('cat /tmp/tee.tmp')
+                    out = conn.sudo('cat /tmp/tee.tmp')
                     if 'Syncing disks' in out:
                         allow = True
                     elif 'The kernel still uses the old table.' in out:
-                        if sudo('partprobe'):
+                        if conn.sudo('partprobe'):
                             with settings(warn_only=True):
                                 reboot(wait=180)
                         allow = True
                     else:
                         counter += 1
                         time.sleep(5)
-            sudo('umount -l /dev/{}1'.format(disk_name), warn_only=True)
-            sudo('mkfs.ext4 -F /dev/{}1'.format(disk_name))
-            sudo('mount /dev/{}1 /opt/'.format(disk_name))
-            sudo(''' bash -c "echo '/dev/{}1 /opt/ ext4 errors=remount-ro 0 1' >> /etc/fstab" '''.format(
+            conn.sudo('umount -l /dev/{}1'.format(disk_name), warn_only=True)
+            conn.sudo('mkfs.ext4 -F /dev/{}1'.format(disk_name))
+            conn.sudo('mount /dev/{}1 /opt/'.format(disk_name))
+            conn.sudo(''' bash -c "echo '/dev/{}1 /opt/ ext4 errors=remount-ro 0 1' >> /etc/fstab" '''.format(
                 disk_name))
-            sudo('touch /home/' + os_user + '/.ensure_dir/disk_ensured')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/disk_ensured')
         except Exception as err:
             traceback.print_exc()
             print('Error:', str(err))
@@ -1280,35 +1280,35 @@ def ensure_local_spark(os_user, spark_link, spark_version, hadoop_version, local
     if not exists('/home/' + os_user + '/.ensure_dir/local_spark_ensured'):
         try:
             if os.environ['azure_datalake_enable'] == 'false':
-                sudo('wget ' + spark_link + ' -O /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz')
-                sudo('tar -zxvf /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz -C /opt/')
-                sudo('mv /opt/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' + local_spark_path)
-                sudo('chown -R ' + os_user + ':' + os_user + ' ' + local_spark_path)
-                sudo('touch /home/' + os_user + '/.ensure_dir/local_spark_ensured')
+                conn.sudo('wget ' + spark_link + ' -O /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz')
+                conn.sudo('tar -zxvf /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz -C /opt/')
+                conn.sudo('mv /opt/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' + local_spark_path)
+                conn.sudo('chown -R ' + os_user + ':' + os_user + ' ' + local_spark_path)
+                conn.sudo('touch /home/' + os_user + '/.ensure_dir/local_spark_ensured')
             else:
                 # Downloading Spark without Hadoop
-                sudo('wget https://archive.apache.org/dist/spark/spark-{0}/spark-{0}-bin-without-hadoop.tgz -O /tmp/spark-{0}-bin-without-hadoop.tgz'
+                conn.sudo('wget https://archive.apache.org/dist/spark/spark-{0}/spark-{0}-bin-without-hadoop.tgz -O /tmp/spark-{0}-bin-without-hadoop.tgz'
                     .format(spark_version))
-                sudo('tar -zxvf /tmp/spark-{}-bin-without-hadoop.tgz -C /opt/'.format(spark_version))
-                sudo('mv /opt/spark-{}-bin-without-hadoop {}'.format(spark_version, local_spark_path))
-                sudo('chown -R {0}:{0} {1}'.format(os_user, local_spark_path))
+                conn.sudo('tar -zxvf /tmp/spark-{}-bin-without-hadoop.tgz -C /opt/'.format(spark_version))
+                conn.sudo('mv /opt/spark-{}-bin-without-hadoop {}'.format(spark_version, local_spark_path))
+                conn.sudo('chown -R {0}:{0} {1}'.format(os_user, local_spark_path))
                 # Downloading Hadoop
                 hadoop_version = '3.0.0'
-                sudo('wget https://archive.apache.org/dist/hadoop/common/hadoop-{0}/hadoop-{0}.tar.gz -O /tmp/hadoop-{0}.tar.gz'
+                conn.sudo('wget https://archive.apache.org/dist/hadoop/common/hadoop-{0}/hadoop-{0}.tar.gz -O /tmp/hadoop-{0}.tar.gz'
                     .format(hadoop_version))
-                sudo('tar -zxvf /tmp/hadoop-{0}.tar.gz -C /opt/'.format(hadoop_version))
-                sudo('mv /opt/hadoop-{0} /opt/hadoop/'.format(hadoop_version))
-                sudo('chown -R {0}:{0} /opt/hadoop/'.format(os_user))
+                conn.sudo('tar -zxvf /tmp/hadoop-{0}.tar.gz -C /opt/'.format(hadoop_version))
+                conn.sudo('mv /opt/hadoop-{0} /opt/hadoop/'.format(hadoop_version))
+                conn.sudo('chown -R {0}:{0} /opt/hadoop/'.format(os_user))
                 # Configuring Hadoop and Spark
                 java_path = datalab.common_lib.find_java_path_remote()
-                sudo('echo "export JAVA_HOME={}" >> /opt/hadoop/etc/hadoop/hadoop-env.sh'.format(java_path))
-                sudo("""echo 'export HADOOP_CLASSPATH="$HADOOP_HOME/share/hadoop/tools/lib/*"' >> /opt/hadoop/etc/hadoop/hadoop-env.sh""")
-                sudo('echo "export HADOOP_HOME=/opt/hadoop/" >> /opt/spark/conf/spark-env.sh')
-                sudo('echo "export SPARK_HOME=/opt/spark/" >> /opt/spark/conf/spark-env.sh')
-                spark_dist_classpath = sudo('/opt/hadoop/bin/hadoop classpath')
-                sudo('echo "export SPARK_DIST_CLASSPATH={}" >> /opt/spark/conf/spark-env.sh'.format(
+                conn.sudo('echo "export JAVA_HOME={}" >> /opt/hadoop/etc/hadoop/hadoop-env.sh'.format(java_path))
+                conn.sudo("""echo 'export HADOOP_CLASSPATH="$HADOOP_HOME/share/hadoop/tools/lib/*"' >> /opt/hadoop/etc/hadoop/hadoop-env.sh""")
+                conn.sudo('echo "export HADOOP_HOME=/opt/hadoop/" >> /opt/spark/conf/spark-env.sh')
+                conn.sudo('echo "export SPARK_HOME=/opt/spark/" >> /opt/spark/conf/spark-env.sh')
+                spark_dist_classpath = conn.sudo('/opt/hadoop/bin/hadoop classpath')
+                conn.sudo('echo "export SPARK_DIST_CLASSPATH={}" >> /opt/spark/conf/spark-env.sh'.format(
                     spark_dist_classpath))
-                sudo('touch /home/{}/.ensure_dir/local_spark_ensured'.format(os_user))
+                conn.sudo('touch /home/{}/.ensure_dir/local_spark_ensured'.format(os_user))
         except Exception as err:
             print('Error:', str(err))
             sys.exit(1)
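
The Azure prepare_disk() hunks above still rely on Fabric 1's
settings(warn_only=True) context manager and pass warn_only= to conn.sudo().
In Fabric 2 the equivalent is the per-call warn=True keyword (illustrative,
not part of this commit):

    conn.sudo('umount -l /dev/{}1'.format(disk_name), warn=True)
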
diff --git a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
index f217820..87cda96 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
@@ -1145,20 +1145,20 @@ class GCPActions:
             env.user = "{}".format(ssh_user)
             env.key_filename = "{}".format(key_path)
             env.host_string = env.user + "@" + env.hosts
-            sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(ssh_user, dataproc_name))
+            conn.sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(ssh_user, dataproc_name))
             if exists('/home/{}/.ensure_dir/dataengine-service_{}_interpreter_ensured'.format(ssh_user, dataproc_name)):
                 if os.environ['notebook_multiple_clusters'] == 'true':
                     try:
-                        livy_port = sudo("cat /opt/" + dataproc_version + "/" + dataproc_name
+                        livy_port = conn.sudo("cat /opt/" + dataproc_version + "/" + dataproc_name
                                          + "/livy/conf/livy.conf | grep livy.server.port | tail -n 1 | awk '{printf $3}'")
-                        process_number = sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
+                        process_number = conn.sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
                                               "' | awk '{print $7}' | sed 's|/.*||g'")
-                        sudo('kill -9 ' + process_number)
-                        sudo('systemctl disable livy-server-' + livy_port)
+                        conn.sudo('kill -9 ' + process_number)
+                        conn.sudo('systemctl disable livy-server-' + livy_port)
                     except:
                         print("Wasn't able to find Livy server for this EMR!")
-                sudo('sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh')
-                sudo("rm -rf /home/{}/.ensure_dir/dataengine-service_interpreter_ensure".format(ssh_user))
+                conn.sudo('sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh')
+                conn.sudo("rm -rf /home/{}/.ensure_dir/dataengine-service_interpreter_ensure".format(ssh_user))
                 zeppelin_url = 'http://' + notebook_ip + ':8080/api/interpreter/setting/'
                 opener = urllib2.build_opener(urllib2.ProxyHandler({}))
                 req = opener.open(urllib2.Request(zeppelin_url))
@@ -1173,20 +1173,20 @@ class GCPActions:
                         request.get_method = lambda: 'DELETE'
                         url = opener.open(request)
                         print(url.read())
-                sudo('chown {0}:{0} -R /opt/zeppelin/'.format(ssh_user))
-                sudo('systemctl restart zeppelin-notebook.service')
+                conn.sudo('chown {0}:{0} -R /opt/zeppelin/'.format(ssh_user))
+                conn.sudo('systemctl restart zeppelin-notebook.service')
                 zeppelin_restarted = False
                 while not zeppelin_restarted:
-                    sudo('sleep 5')
-                    result = sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?')
+                    conn.sudo('sleep 5')
+                    result = conn.sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?')
                     result = result[:1]
                     if result == '1':
                         zeppelin_restarted = True
-                sudo('sleep 5')
-                sudo('rm -rf /home/{}/.ensure_dir/dataengine-service_{}_interpreter_ensured'.format(ssh_user, dataproc_name))
+                conn.sudo('sleep 5')
+                conn.sudo('rm -rf /home/{}/.ensure_dir/dataengine-service_{}_interpreter_ensured'.format(ssh_user, dataproc_name))
             if exists('/home/{}/.ensure_dir/rstudio_dataengine-service_ensured'.format(ssh_user)):
                 datalab.fab.remove_rstudio_dataengines_kernel(computational_name, ssh_user)
-            sudo('rm -rf  /opt/{0}/{1}/'.format(dataproc_version, dataproc_name))
+            conn.sudo('rm -rf  /opt/{0}/{1}/'.format(dataproc_version, dataproc_name))
             print("Notebook's {} kernels were removed".format(env.hosts))
         except Exception as err:
             logging.info(
@@ -1337,19 +1337,19 @@ def ensure_local_jars(os_user, jars_dir):
     if not exists('/home/{}/.ensure_dir/gs_kernel_ensured'.format(os_user)):
         try:
             templates_dir = '/root/templates/'
-            sudo('mkdir -p {}'.format(jars_dir))
-            sudo('wget https://storage.googleapis.com/hadoop-lib/gcs/gcs-connector-hadoop2-{0}.jar -O {1}'
+            conn.sudo('mkdir -p {}'.format(jars_dir))
+            conn.sudo('wget https://storage.googleapis.com/hadoop-lib/gcs/gcs-connector-hadoop2-{0}.jar -O {1}'
                  'gcs-connector-hadoop2-{0}.jar'.format(os.environ['notebook_gcs_connector_version'], jars_dir))
-            sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-yarn-server-web-proxy/2.7.4/{0} -O {1}{0}'
+            conn.sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-yarn-server-web-proxy/2.7.4/{0} -O {1}{0}'
                  .format('hadoop-yarn-server-web-proxy-2.7.4.jar', jars_dir))
-            put(templates_dir + 'core-site.xml', '/tmp/core-site.xml')
-            sudo('sed -i "s|GCP_PROJECT_ID|{}|g" /tmp/core-site.xml'.format(os.environ['gcp_project_id']))
-            sudo('mv /tmp/core-site.xml /opt/spark/conf/core-site.xml')
-            put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/notebook_spark-defaults_local.conf')
+            conn.put(templates_dir + 'core-site.xml', '/tmp/core-site.xml')
+            conn.sudo('sed -i "s|GCP_PROJECT_ID|{}|g" /tmp/core-site.xml'.format(os.environ['gcp_project_id']))
+            conn.sudo('mv /tmp/core-site.xml /opt/spark/conf/core-site.xml')
+            conn.put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/notebook_spark-defaults_local.conf')
             if os.environ['application'] == 'zeppelin':
                 run('echo \"spark.jars $(ls -1 ' + jars_dir + '* | tr \'\\n\' \',\')\" >> /tmp/notebook_spark-defaults_local.conf')
-            sudo('\cp /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
-            sudo('touch /home/{}/.ensure_dir/gs_kernel_ensured'.format(os_user))
+            conn.sudo('\cp /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
+            conn.sudo('touch /home/{}/.ensure_dir/gs_kernel_ensured'.format(os_user))
         except Exception as err:
             print('Error:', str(err))
             sys.exit(1)
@@ -1372,12 +1372,12 @@ def installing_python(region, bucket, user_name, cluster_name, application='', p
 def prepare_disk(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/disk_ensured'):
         try:
-            disk_name = sudo("lsblk | grep disk | awk '{print $1}' | sort | tail -n 1")
-            sudo('''bash -c 'echo -e "o\nn\np\n1\n\n\nw" | fdisk /dev/{}' '''.format(disk_name))
-            sudo('mkfs.ext4 -F /dev/{}1'.format(disk_name))
-            sudo('mount /dev/{}1 /opt/'.format(disk_name))
-            sudo(''' bash -c "echo '/dev/{}1 /opt/ ext4 errors=remount-ro 0 1' >> /etc/fstab" '''.format(disk_name))
-            sudo('touch /home/' + os_user + '/.ensure_dir/disk_ensured')
+            disk_name = conn.sudo("lsblk | grep disk | awk '{print $1}' | sort | tail -n 1")
+            conn.sudo('''bash -c 'echo -e "o\nn\np\n1\n\n\nw" | fdisk /dev/{}' '''.format(disk_name))
+            conn.sudo('mkfs.ext4 -F /dev/{}1'.format(disk_name))
+            conn.sudo('mount /dev/{}1 /opt/'.format(disk_name))
+            conn.sudo(''' bash -c "echo '/dev/{}1 /opt/ ext4 errors=remount-ro 0 1' >> /etc/fstab" '''.format(disk_name))
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/disk_ensured')
         except:
             sys.exit(1)
 
@@ -1385,11 +1385,11 @@ def prepare_disk(os_user):
 def ensure_local_spark(os_user, spark_link, spark_version, hadoop_version, local_spark_path):
     if not exists('/home/' + os_user + '/.ensure_dir/local_spark_ensured'):
         try:
-            sudo('wget ' + spark_link + ' -O /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz')
-            sudo('tar -zxvf /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz -C /opt/')
-            sudo('mv /opt/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' + local_spark_path)
-            sudo('chown -R ' + os_user + ':' + os_user + ' ' + local_spark_path)
-            sudo('touch /home/' + os_user + '/.ensure_dir/local_spark_ensured')
+            conn.sudo('wget ' + spark_link + ' -O /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz')
+            conn.sudo('tar -zxvf /tmp/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz -C /opt/')
+            conn.sudo('mv /opt/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' + local_spark_path)
+            conn.sudo('chown -R ' + os_user + ':' + os_user + ' ' + local_spark_path)
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/local_spark_ensured')
         except Exception as err:
             print('Error:', str(err))
             sys.exit(1)
@@ -1401,27 +1401,27 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
         spark_jars_paths = None
         if exists('/opt/spark/conf/spark-defaults.conf'):
             try:
-                spark_jars_paths = sudo('cat /opt/spark/conf/spark-defaults.conf | grep -e "^spark.jars " ')
+                spark_jars_paths = conn.sudo('cat /opt/spark/conf/spark-defaults.conf | grep -e "^spark.jars " ')
             except:
                 spark_jars_paths = None
-        put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/notebook_spark-defaults_local.conf')
+        conn.put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/notebook_spark-defaults_local.conf')
         if os.environ['application'] == 'zeppelin':
             run('echo \"spark.jars $(ls -1 ' + jars_dir + '* | tr \'\\n\' \',\')\" >> /tmp/notebook_spark-defaults_local.conf')
-        sudo('\cp -f /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
+        conn.sudo('\cp -f /tmp/notebook_spark-defaults_local.conf /opt/spark/conf/spark-defaults.conf')
         if memory_type == 'driver':
             spark_memory = datalab.fab.get_spark_memory()
-            sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
-            sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
+            conn.sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
+            conn.sudo('echo "spark.{0}.memory {1}m" >> /opt/spark/conf/spark-defaults.conf'.format(memory_type,
                                                                                               spark_memory))
         if not exists('/opt/spark/conf/spark-env.sh'):
-            sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
+            conn.sudo('mv /opt/spark/conf/spark-env.sh.template /opt/spark/conf/spark-env.sh')
         java_home = run("update-alternatives --query java | grep -o --color=never \'/.*/java-8.*/jre\'").splitlines()[0]
-        sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
+        conn.sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/spark/conf/spark-env.sh".format(java_home))
         if 'spark_configurations' in os.environ:
-            datalab_header = sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
+            datalab_header = conn.sudo('cat /tmp/notebook_spark-defaults_local.conf | grep "^#"')
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
             new_spark_defaults = list()
-            spark_defaults = sudo('cat /opt/spark/conf/spark-defaults.conf')
+            spark_defaults = conn.sudo('cat /opt/spark/conf/spark-defaults.conf')
             current_spark_properties = spark_defaults.split('\n')
             for param in current_spark_properties:
                 if param.split(' ')[0] != '#':
@@ -1434,13 +1434,13 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
                                     new_spark_defaults.append(property + ' ' + config['Properties'][property])
                     new_spark_defaults.append(param)
             new_spark_defaults = set(new_spark_defaults)
-            sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(datalab_header))
+            conn.sudo("echo '{}' > /opt/spark/conf/spark-defaults.conf".format(datalab_header))
             for prop in new_spark_defaults:
                 prop = prop.rstrip()
-                sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
-            sudo('sed -i "/^\s*$/d" /opt/spark/conf/spark-defaults.conf')
+                conn.sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(prop))
+            conn.sudo('sed -i "/^\s*$/d" /opt/spark/conf/spark-defaults.conf')
             if spark_jars_paths:
-                sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(spark_jars_paths))
+                conn.sudo('echo "{}" >> /opt/spark/conf/spark-defaults.conf'.format(spark_jars_paths))
     except Exception as err:
         print('Error:', str(err))
         sys.exit(1)
@@ -1454,21 +1454,21 @@ def remove_dataengine_kernels(notebook_name, os_user, key_path, cluster_name):
         env.user = "{}".format(os_user)
         env.key_filename = "{}".format(key_path)
         env.host_string = env.user + "@" + env.hosts
-        sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(os_user, cluster_name))
+        conn.sudo('rm -rf /home/{}/.local/share/jupyter/kernels/*_{}'.format(os_user, cluster_name))
         if exists('/home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name)):
             if os.environ['notebook_multiple_clusters'] == 'true':
                 try:
-                    livy_port = sudo("cat /opt/" + cluster_name +
+                    livy_port = conn.sudo("cat /opt/" + cluster_name +
                                      "/livy/conf/livy.conf | grep livy.server.port | tail -n 1 | awk '{printf $3}'")
-                    process_number = sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
+                    process_number = conn.sudo("netstat -natp 2>/dev/null | grep ':" + livy_port +
                                           "' | awk '{print $7}' | sed 's|/.*||g'")
-                    sudo('kill -9 ' + process_number)
-                    sudo('systemctl disable livy-server-' + livy_port)
+                    conn.sudo('kill -9 ' + process_number)
+                    conn.sudo('systemctl disable livy-server-' + livy_port)
                 except:
                     print("Wasn't able to find Livy server for this EMR!")
-            sudo(
+            conn.sudo(
                 'sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh')
-            sudo("rm -rf /home/{}/.ensure_dir/dataengine_interpreter_ensure".format(os_user))
+            conn.sudo("rm -rf /home/{}/.ensure_dir/dataengine_interpreter_ensure".format(os_user))
             zeppelin_url = 'http://' + private + ':8080/api/interpreter/setting/'
             opener = urllib2.build_opener(urllib2.ProxyHandler({}))
             req = opener.open(urllib2.Request(zeppelin_url))
@@ -1483,22 +1483,22 @@ def remove_dataengine_kernels(notebook_name, os_user, key_path, cluster_name):
                     request.get_method = lambda: 'DELETE'
                     url = opener.open(request)
                     print(url.read())
-            sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/')
-            sudo('systemctl daemon-reload')
-            sudo("service zeppelin-notebook stop")
-            sudo("service zeppelin-notebook start")
+            conn.sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/')
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo("service zeppelin-notebook stop")
+            conn.sudo("service zeppelin-notebook start")
             zeppelin_restarted = False
             while not zeppelin_restarted:
-                sudo('sleep 5')
-                result = sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?')
+                conn.sudo('sleep 5')
+                result = conn.sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?')
                 result = result[:1]
                 if result == '1':
                     zeppelin_restarted = True
-            sudo('sleep 5')
-            sudo('rm -rf /home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name))
+            conn.sudo('sleep 5')
+            conn.sudo('rm -rf /home/{}/.ensure_dir/dataengine_{}_interpreter_ensured'.format(os_user, cluster_name))
         if exists('/home/{}/.ensure_dir/rstudio_dataengine_ensured'.format(os_user)):
             datalab.fab.remove_rstudio_dataengines_kernel(computational_name, os_user)
-        sudo('rm -rf  /opt/' + cluster_name + '/')
+        conn.sudo('rm -rf  /opt/' + cluster_name + '/')
         print("Notebook's {} kernels were removed".format(env.hosts))
     except Exception as err:
         logging.info("Unable to remove kernels on Notebook: " + str(err) + "\n Traceback: " + traceback.print_exc(
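
Note: several converted call sites above capture the return value and then treat it as a string (for example result[:1], spark_defaults.split('\n'), or shell commands built from livy_port and process_number). Fabric 1's sudo()/run() returned string-like objects; Fabric 2 returns a Result whose text lives in .stdout. A hedged sketch of the adjustment such call sites generally need (function names are illustrative):

    # Illustrative only: Fabric 2 run()/sudo() return a Result object, not a plain string,
    # so values that are sliced or split must be read from .stdout first.
    from fabric import Connection

    def zeppelin_port_closed(conn: Connection) -> bool:
        result = conn.sudo('nmap -p 8080 localhost | grep "closed" > /dev/null; echo $?', hide=True)
        return result.stdout.strip()[:1] == '1'

    def read_spark_defaults(conn: Connection) -> list:
        spark_defaults = conn.sudo('cat /opt/spark/conf/spark-defaults.conf', hide=True).stdout
        return spark_defaults.split('\n')
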
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
index eec1983..5a38eaf 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
@@ -39,45 +39,45 @@ def manage_pkg(command, environment, requisites):
             else:
                 print('Package manager is:')
                 if environment == 'remote':
-                    if sudo('pgrep "^apt" -a && echo "busy" || echo "ready"') == 'busy' or sudo('pgrep "^dpkg" -a && echo "busy" || echo "ready"') == 'busy':
+                    if conn.sudo('pgrep "^apt" -a && echo "busy" || echo "ready"') == 'busy' or conn.sudo('pgrep "^dpkg" -a && echo "busy" || echo "ready"') == 'busy':
                         counter += 1
                         time.sleep(10)
                     else:
                         try:
                             error_parser = "frontend is locked|locked"
-                            sudo('dpkg --configure -a 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E "({0})" /tmp/tee.tmp > '
+                            conn.sudo('dpkg --configure -a 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E "({0})" /tmp/tee.tmp > '
                                      '/tmp/dpkg.log; then echo "" > /tmp/dpkg.log;fi'.format(error_parser))
-                            err = sudo('cat /tmp/dpkg.log')
+                            err = conn.sudo('cat /tmp/dpkg.log')
                             count = 0
                             while err != '' and count < 10:
-                                pid = sudo('lsof /var/lib/dpkg/lock-frontend | grep dpkg | awk \'{print $2}\'')
+                                pid = conn.sudo('lsof /var/lib/dpkg/lock-frontend | grep dpkg | awk \'{print $2}\'')
                                 if pid != '':
-                                    sudo('kill -9 {}'.format(pid))
-                                    sudo('rm -f /var/lib/dpkg/lock-frontend')
-                                    pid = sudo('lsof /var/lib/dpkg/lock | grep dpkg | awk \'{print $2}\'')
+                                    conn.sudo('kill -9 {}'.format(pid))
+                                    conn.sudo('rm -f /var/lib/dpkg/lock-frontend')
+                                    pid = conn.sudo('lsof /var/lib/dpkg/lock | grep dpkg | awk \'{print $2}\'')
                                 elif pid != '':
-                                    sudo('kill -9 {}'.format(pid))
-                                    sudo('rm -f /var/lib/dpkg/lock')
-                                sudo('dpkg --configure -a 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E "({0})" /tmp/tee.tmp > '
+                                    conn.sudo('kill -9 {}'.format(pid))
+                                    conn.sudo('rm -f /var/lib/dpkg/lock')
+                                conn.sudo('dpkg --configure -a 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E "({0})" /tmp/tee.tmp > '
                                      '/tmp/dpkg.log; then echo "" > /tmp/dpkg.log;fi'.format(error_parser))
-                                err = sudo('cat /tmp/dpkg.log')
+                                err = conn.sudo('cat /tmp/dpkg.log')
                                 count = count + 1
-                            sudo('apt update')
+                            conn.sudo('apt update')
 
-                            sudo('apt-get {0} {1} 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E "({2})" /tmp/tee.tmp > '
+                            conn.sudo('apt-get {0} {1} 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E "({2})" /tmp/tee.tmp > '
                                  '/tmp/apt.log; then echo "" > /tmp/apt.log;fi'.format(command, requisites, error_parser))
-                            err = sudo('cat /tmp/apt.log')
+                            err = conn.sudo('cat /tmp/apt.log')
                             count = 0
                             while err != '' and count < 10:
-                                sudo('lsof /var/lib/dpkg/lock')
-                                sudo('lsof /var/lib/apt/lists/lock')
-                                sudo('lsof /var/cache/apt/archives/lock')
-                                sudo('rm -f /var/lib/apt/lists/lock')
-                                sudo('rm -f /var/cache/apt/archives/lock')
-                                sudo('rm -f /var/lib/dpkg/lock')
-                                sudo('apt-get {0} {1} 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E "({2})" /tmp/tee.tmp > '
+                                conn.sudo('lsof /var/lib/dpkg/lock')
+                                conn.sudo('lsof /var/lib/apt/lists/lock')
+                                conn.sudo('lsof /var/cache/apt/archives/lock')
+                                conn.sudo('rm -f /var/lib/apt/lists/lock')
+                                conn.sudo('rm -f /var/cache/apt/archives/lock')
+                                conn.sudo('rm -f /var/lib/dpkg/lock')
+                                conn.sudo('apt-get {0} {1} 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E "({2})" /tmp/tee.tmp > '
                                      '/tmp/apt.log; then echo "" > /tmp/apt.log;fi'.format(command, requisites, error_parser))
-                                err = sudo('cat /tmp/apt.log')
+                                err = conn.sudo('cat /tmp/apt.log')
                                 count = count + 1
                             allow = True
                         except Exception as err:
@@ -114,13 +114,13 @@ def ensure_pkg(user, requisites='linux-headers-generic python3-pip python3-dev p
                         print("Attempt number " + str(count) + " to install requested tools. Max 60 tries.")
                         manage_pkg('update', 'remote', '')
                         manage_pkg('-y install', 'remote', requisites)
-                        sudo('unattended-upgrades -v')
-                        sudo(
+                        conn.sudo('unattended-upgrades -v')
+                        conn.sudo(
                             'sed -i \'s|APT::Periodic::Unattended-Upgrade "1"|APT::Periodic::Unattended-Upgrade "0"|\' /etc/apt/apt.conf.d/20auto-upgrades')
-                        sudo('export LC_ALL=C')
-                        sudo('touch /home/{}/.ensure_dir/pkg_upgraded'.format(user))
-                        sudo('systemctl enable haveged')
-                        sudo('systemctl start haveged')
+                        conn.sudo('export LC_ALL=C')
+                        conn.sudo('touch /home/{}/.ensure_dir/pkg_upgraded'.format(user))
+                        conn.sudo('systemctl enable haveged')
+                        conn.sudo('systemctl start haveged')
                         if os.environ['conf_cloud_provider'] == 'aws':
                             manage_pkg('-y install --install-recommends', 'remote', 'linux-aws-hwe')
                         check = True
@@ -134,22 +134,22 @@ def ensure_pkg(user, requisites='linux-headers-generic python3-pip python3-dev p
 def renew_gpg_key():
     try:
 #        if exists('/etc/apt/trusted.gpg'):
-#            sudo('mv /etc/apt/trusted.gpg /etc/apt/trusted.bkp')
-        sudo('apt-key update')
+#            conn.sudo('mv /etc/apt/trusted.gpg /etc/apt/trusted.bkp')
+        conn.sudo('apt-key update')
     except:
         sys.exit(1)
 
 
 def change_pkg_repos():
     if not exists('/tmp/pkg_china_ensured'):
-        put('/root/files/sources.list', '/tmp/sources.list')
-        sudo('mv /tmp/sources.list /etc/apt/sources.list')
+        conn.put('/root/files/sources.list', '/tmp/sources.list')
+        conn.sudo('mv /tmp/sources.list /etc/apt/sources.list')
         manage_pkg('update', 'remote', '')
-        sudo('touch /tmp/pkg_china_ensured')
+        conn.sudo('touch /tmp/pkg_china_ensured')
 
 
 def find_java_path_remote():
-    java_path = sudo("sh -c \"update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'\"")
+    java_path = conn.sudo("sh -c \"update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'\"")
     return java_path
 
 
@@ -161,14 +161,14 @@ def find_java_path_local():
 def ensure_ntpd(user, edge_private_ip=''):
     try:
         if not exists('/home/{}/.ensure_dir/ntpd_ensured'.format(user)):
-            sudo('timedatectl set-ntp no')
+            conn.sudo('timedatectl set-ntp no')
             manage_pkg('-y install', 'remote', 'ntp ntpdate')
-            sudo('echo "tinker panic 0" >> /etc/ntp.conf')
+            conn.sudo('echo "tinker panic 0" >> /etc/ntp.conf')
             if os.environ['conf_resource'] != 'ssn' and os.environ['conf_resource'] != 'edge':
-                sudo('echo "server {} prefer iburst" >> /etc/ntp.conf'.format(edge_private_ip))
-            sudo('systemctl restart ntp')
-            sudo('systemctl enable ntp')
-            sudo('touch /home/{}/.ensure_dir/ntpd_ensured'.format(user))
+                conn.sudo('echo "server {} prefer iburst" >> /etc/ntp.conf'.format(edge_private_ip))
+            conn.sudo('systemctl restart ntp')
+            conn.sudo('systemctl enable ntp')
+            conn.sudo('touch /home/{}/.ensure_dir/ntpd_ensured'.format(user))
     except:
         sys.exit(1)
 
@@ -177,7 +177,7 @@ def ensure_java(user):
     try:
         if not exists('/home/{}/.ensure_dir/java_ensured'.format(user)):
             manage_pkg('-y install', 'remote', 'openjdk-8-jdk')
-            sudo('touch /home/{}/.ensure_dir/java_ensured'.format(user))
+            conn.sudo('touch /home/{}/.ensure_dir/java_ensured'.format(user))
     except:
         sys.exit(1)
 
@@ -186,10 +186,10 @@ def ensure_step(user):
     try:
         if not exists('/home/{}/.ensure_dir/step_ensured'.format(user)):
             manage_pkg('-y install', 'remote', 'wget')
-            sudo('wget https://github.com/smallstep/cli/releases/download/v0.13.3/step-cli_0.13.3_amd64.deb '
+            conn.sudo('wget https://github.com/smallstep/cli/releases/download/v0.13.3/step-cli_0.13.3_amd64.deb '
                  '-O /tmp/step-cli_0.13.3_amd64.deb')
-            sudo('dpkg -i /tmp/step-cli_0.13.3_amd64.deb')
-            sudo('touch /home/{}/.ensure_dir/step_ensured'.format(user))
+            conn.sudo('dpkg -i /tmp/step-cli_0.13.3_amd64.deb')
+            conn.sudo('touch /home/{}/.ensure_dir/step_ensured'.format(user))
     except:
         sys.exit(1)
 
@@ -197,12 +197,12 @@ def install_certbot(os_family):
     try:
         print('Installing Certbot')
         if os_family == 'debian':
-            sudo('apt-get -y update')
-            sudo('apt-get -y install software-properties-common')
-            sudo('add-apt-repository -y universe')
-            sudo('add-apt-repository -y ppa:certbot/certbot')
-            sudo('apt-get -y update')
-            sudo('apt-get -y install certbot')
+            conn.sudo('apt-get -y update')
+            conn.sudo('apt-get -y install software-properties-common')
+            conn.sudo('add-apt-repository -y universe')
+            conn.sudo('add-apt-repository -y ppa:certbot/certbot')
+            conn.sudo('apt-get -y update')
+            conn.sudo('apt-get -y install certbot')
         elif os_family == 'redhat':
             print('This OS family is not supported yet')
     except Exception as err:
@@ -214,13 +214,13 @@ def run_certbot(domain_name, node, email=''):
     try:
         print('Running  Certbot')
         if node == 'ssn':
-            sudo('service nginx stop')
+            conn.sudo('service nginx stop')
         else:
-            sudo('service openresty stop')
+            conn.sudo('service openresty stop')
         if email != '':
-            sudo('certbot certonly --standalone -n -d {}.{} -m {} --agree-tos'.format(node, domain_name, email))
+            conn.sudo('certbot certonly --standalone -n -d {}.{} -m {} --agree-tos'.format(node, domain_name, email))
         else:
-            sudo('certbot certonly --standalone -n -d {}.{} --register-unsafely-without-email --agree-tos'.format(node, domain_name))
+            conn.sudo('certbot certonly --standalone -n -d {}.{} --register-unsafely-without-email --agree-tos'.format(node, domain_name))
     except Exception as err:
         traceback.print_exc()
         print('Failed to run Certbot: ' + str(err))
@@ -237,14 +237,14 @@ def configure_nginx_LE(domain_name, node):
             nginx_config_path = '/etc/nginx/conf.d/nginx_proxy.conf'
         else:
             nginx_config_path = '/usr/local/openresty/nginx/conf/conf.d/proxy.conf'
-        sudo('sed -i "s|.*    server_name .*|{}|" {}'.format(server_name_line, nginx_config_path))
-        sudo('sed -i "s|.*    ssl_certificate .*|{}|" {}'.format(cert_path_line, nginx_config_path))
-        sudo('sed -i "s|.*    ssl_certificate_key .*|{}|" {}'.format(cert_key_line, nginx_config_path))
-        sudo('sed -i "s|.*ExecStart.*|{}|" {}'.format(certbot_service, certbot_service_path))
+        conn.sudo('sed -i "s|.*    server_name .*|{}|" {}'.format(server_name_line, nginx_config_path))
+        conn.sudo('sed -i "s|.*    ssl_certificate .*|{}|" {}'.format(cert_path_line, nginx_config_path))
+        conn.sudo('sed -i "s|.*    ssl_certificate_key .*|{}|" {}'.format(cert_key_line, nginx_config_path))
+        conn.sudo('sed -i "s|.*ExecStart.*|{}|" {}'.format(certbot_service, certbot_service_path))
         if node == 'ssn':
-            sudo('systemctl restart nginx')
+            conn.sudo('systemctl restart nginx')
         else:
-            sudo('systemctl restart openresty')
+            conn.sudo('systemctl restart openresty')
     except Exception as err:
         traceback.print_exc()
         print('Failed to run Certbot: ' + str(err))
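
Note: the exists(...) checks left untouched in these hunks come from fabric.contrib.files, which has no direct Fabric 2 equivalent, and their call sites do not pass a connection. One common replacement is a small helper built on the new connection; this is a sketch under that assumption, not part of this commit:

    # Illustrative replacement for fabric.contrib.files.exists() on top of a Fabric 2 connection.
    from fabric import Connection

    def exists(conn: Connection, path: str) -> bool:
        # warn=True keeps a non-zero exit code from raising; .ok reports whether the test passed
        return conn.run('test -e {}'.format(path), warn=True, hide=True).ok
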
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py
index 62e02fb..2e003c0 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py
@@ -37,25 +37,25 @@ def configure_http_proxy_server(config):
             manage_pkg('-y install', 'remote', 'squid')
             template_file = config['template_file']
             proxy_subnet = config['exploratory_subnet']
-            put(template_file, '/tmp/squid.conf')
-            sudo('\cp /tmp/squid.conf /etc/squid/squid.conf')
-            sudo('sed -i "s|PROXY_SUBNET|{}|g" /etc/squid/squid.conf'.format(proxy_subnet))
-            sudo('sed -i "s|EDGE_USER_NAME|{}|g" /etc/squid/squid.conf'.format(config['project_name']))
-            sudo('sed -i "s|LDAP_HOST|{}|g" /etc/squid/squid.conf'.format(config['ldap_host']))
-            sudo('sed -i "s|LDAP_DN|{}|g" /etc/squid/squid.conf'.format(config['ldap_dn']))
-            sudo('sed -i "s|LDAP_SERVICE_USERNAME|{}|g" /etc/squid/squid.conf'.format(config['ldap_user']))
-            sudo('sed -i "s|LDAP_SERVICE_PASSWORD|{}|g" /etc/squid/squid.conf'.format(config['ldap_password']))
-            sudo('sed -i "s|LDAP_AUTH_PATH|{}|g" /etc/squid/squid.conf'.format('/usr/lib/squid/basic_ldap_auth'))
+            conn.put(template_file, '/tmp/squid.conf')
+            conn.sudo('\cp /tmp/squid.conf /etc/squid/squid.conf')
+            conn.sudo('sed -i "s|PROXY_SUBNET|{}|g" /etc/squid/squid.conf'.format(proxy_subnet))
+            conn.sudo('sed -i "s|EDGE_USER_NAME|{}|g" /etc/squid/squid.conf'.format(config['project_name']))
+            conn.sudo('sed -i "s|LDAP_HOST|{}|g" /etc/squid/squid.conf'.format(config['ldap_host']))
+            conn.sudo('sed -i "s|LDAP_DN|{}|g" /etc/squid/squid.conf'.format(config['ldap_dn']))
+            conn.sudo('sed -i "s|LDAP_SERVICE_USERNAME|{}|g" /etc/squid/squid.conf'.format(config['ldap_user']))
+            conn.sudo('sed -i "s|LDAP_SERVICE_PASSWORD|{}|g" /etc/squid/squid.conf'.format(config['ldap_password']))
+            conn.sudo('sed -i "s|LDAP_AUTH_PATH|{}|g" /etc/squid/squid.conf'.format('/usr/lib/squid/basic_ldap_auth'))
             replace_string = ''
             for cidr in config['vpc_cidrs']:
                 replace_string += 'acl AWS_VPC_CIDR dst {}\\n'.format(cidr)
-            sudo('sed -i "s|VPC_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
+            conn.sudo('sed -i "s|VPC_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
             replace_string = ''
             for cidr in config['allowed_ip_cidr']:
                 replace_string += 'acl AllowedCIDRS src {}\\n'.format(cidr)
-            sudo('sed -i "s|ALLOWED_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
-            sudo('systemctl restart squid')
-            sudo('touch /tmp/http_proxy_ensured')
+            conn.sudo('sed -i "s|ALLOWED_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
+            conn.sudo('systemctl restart squid')
+            conn.sudo('touch /tmp/http_proxy_ensured')
     except Exception as err:
         print("Failed to install and configure squid: " + str(err))
         sys.exit(1)
@@ -72,88 +72,88 @@ def install_nginx_lua(edge_ip, nginx_version, keycloak_auth_server_url, keycloak
                        'libtool-bin zip readline-doc perl curl liblua5.1-0 liblua5.1-0-dev lua5.1')
             manage_pkg('-y install --no-install-recommends', 'remote', 'wget gnupg ca-certificates')
             if os.environ['conf_stepcerts_enabled'] == 'true':
-                sudo('mkdir -p /home/{0}/keys'.format(user))
-                sudo('''bash -c 'echo "{0}" | base64 --decode > /etc/ssl/certs/root_ca.crt' '''.format(
+                conn.sudo('mkdir -p /home/{0}/keys'.format(user))
+                conn.sudo('''bash -c 'echo "{0}" | base64 --decode > /etc/ssl/certs/root_ca.crt' '''.format(
                      os.environ['conf_stepcerts_root_ca']))
-                fingerprint = sudo('step certificate fingerprint /etc/ssl/certs/root_ca.crt')
-                sudo('step ca bootstrap --fingerprint {0} --ca-url "{1}"'.format(fingerprint,
+                fingerprint = conn.sudo('step certificate fingerprint /etc/ssl/certs/root_ca.crt')
+                conn.sudo('step ca bootstrap --fingerprint {0} --ca-url "{1}"'.format(fingerprint,
                                                                                  os.environ['conf_stepcerts_ca_url']))
-                sudo('echo "{0}" > /home/{1}/keys/provisioner_password'.format(
+                conn.sudo('echo "{0}" > /home/{1}/keys/provisioner_password'.format(
                      os.environ['conf_stepcerts_kid_password'], user))
                 sans = "--san localhost --san 127.0.0.1 {0}".format(step_cert_sans)
                 cn = edge_ip
-                sudo('step ca token {3} --kid {0} --ca-url "{1}" --root /etc/ssl/certs/root_ca.crt '
+                conn.sudo('step ca token {3} --kid {0} --ca-url "{1}" --root /etc/ssl/certs/root_ca.crt '
                      '--password-file /home/{2}/keys/provisioner_password {4} --output-file /tmp/step_token'.format(
                     os.environ['conf_stepcerts_kid'], os.environ['conf_stepcerts_ca_url'], user, cn, sans))
-                token = sudo('cat /tmp/step_token')
-                sudo('step ca certificate "{0}" /etc/ssl/certs/datalab.crt /etc/ssl/certs/datalab.key '
+                token = conn.sudo('cat /tmp/step_token')
+                conn.sudo('step ca certificate "{0}" /etc/ssl/certs/datalab.crt /etc/ssl/certs/datalab.key '
                      '--token "{1}" --kty=RSA --size 2048 --provisioner {2} '.format(cn, token,
                                                                                      os.environ['conf_stepcerts_kid']))
-                sudo('touch /var/log/renew_certificates.log')
-                put('/root/templates/manage_step_certs.sh', '/usr/local/bin/manage_step_certs.sh', use_sudo=True)
-                sudo('chmod +x /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_ROOT_CERT_PATH|/etc/ssl/certs/root_ca.crt|g" '
+                conn.sudo('touch /var/log/renew_certificates.log')
+                conn.put('/root/templates/manage_step_certs.sh', '/usr/local/bin/manage_step_certs.sh', use_sudo=True)
+                conn.sudo('chmod +x /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|STEP_ROOT_CERT_PATH|/etc/ssl/certs/root_ca.crt|g" '
                      '/usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_CERT_PATH|/etc/ssl/certs/datalab.crt|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_KEY_PATH|/etc/ssl/certs/datalab.key|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_CA_URL|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
+                conn.sudo('sed -i "s|STEP_CERT_PATH|/etc/ssl/certs/datalab.crt|g" /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|STEP_KEY_PATH|/etc/ssl/certs/datalab.key|g" /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|STEP_CA_URL|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
                     os.environ['conf_stepcerts_ca_url']))
-                sudo('sed -i "s|RESOURCE_TYPE|edge|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|SANS|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(sans))
-                sudo('sed -i "s|CN|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(cn))
-                sudo('sed -i "s|KID|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
+                conn.sudo('sed -i "s|RESOURCE_TYPE|edge|g" /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|SANS|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(sans))
+                conn.sudo('sed -i "s|CN|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(cn))
+                conn.sudo('sed -i "s|KID|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
                     os.environ['conf_stepcerts_kid']))
-                sudo('sed -i "s|STEP_PROVISIONER_PASSWORD_PATH|/home/{0}/keys/provisioner_password|g" '
+                conn.sudo('sed -i "s|STEP_PROVISIONER_PASSWORD_PATH|/home/{0}/keys/provisioner_password|g" '
                      '/usr/local/bin/manage_step_certs.sh'.format(user))
-                sudo('bash -c \'echo "0 * * * * root /usr/local/bin/manage_step_certs.sh >> '
+                conn.sudo('bash -c \'echo "0 * * * * root /usr/local/bin/manage_step_certs.sh >> '
                      '/var/log/renew_certificates.log 2>&1" >> /etc/crontab \'')
-                put('/root/templates/step-cert-manager.service', '/etc/systemd/system/step-cert-manager.service',
+                conn.put('/root/templates/step-cert-manager.service', '/etc/systemd/system/step-cert-manager.service',
                     use_sudo=True)
-                sudo('systemctl daemon-reload')
-                sudo('systemctl enable step-cert-manager.service')
+                conn.sudo('systemctl daemon-reload')
+                conn.sudo('systemctl enable step-cert-manager.service')
             else:
-                sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key \
+                conn.sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key \
                      -out /etc/ssl/certs/datalab.crt -subj "/C=US/ST=US/L=US/O=datalab/CN={}"'.format(hostname))
 
-            sudo('mkdir -p /tmp/src')
+            conn.sudo('mkdir -p /tmp/src')
             with cd('/tmp/src/'):
-                sudo('wget https://luarocks.org/releases/luarocks-3.3.1.tar.gz')
-                sudo('tar -xzf luarocks-3.3.1.tar.gz')
+                conn.sudo('wget https://luarocks.org/releases/luarocks-3.3.1.tar.gz')
+                conn.sudo('tar -xzf luarocks-3.3.1.tar.gz')
 
-            sudo('wget -O - https://openresty.org/package/pubkey.gpg | sudo apt-key add -')
-            sudo('add-apt-repository -y "deb http://openresty.org/package/ubuntu $(lsb_release -sc) main"')
-            sudo('apt-get update')
-            sudo('apt-get -y install openresty=1.19.3.1-1~focal1')
+            conn.sudo('wget -O - https://openresty.org/package/pubkey.gpg | sudo apt-key add -')
+            conn.sudo('add-apt-repository -y "deb http://openresty.org/package/ubuntu $(lsb_release -sc) main"')
+            conn.sudo('apt-get update')
+            conn.sudo('apt-get -y install openresty=1.19.3.1-1~focal1')
 
             with cd('/tmp/src/luarocks-3.3.1/'):
-                sudo('./configure')
-                sudo('make install')
-                sudo('luarocks install lua-resty-jwt 0.2.2 --tree /usr/local/openresty/lualib/resty/')
-                sudo('luarocks install lua-resty-openidc --tree /usr/local/openresty/lualib/resty/')
+                conn.sudo('./configure')
+                conn.sudo('make install')
+                conn.sudo('luarocks install lua-resty-jwt 0.2.2 --tree /usr/local/openresty/lualib/resty/')
+                conn.sudo('luarocks install lua-resty-openidc --tree /usr/local/openresty/lualib/resty/')
 
-            sudo('luarocks install lua-resty-jwt 0.2.2')
-            sudo('luarocks install lua-resty-openidc')
+            conn.sudo('luarocks install lua-resty-jwt 0.2.2')
+            conn.sudo('luarocks install lua-resty-openidc')
 
-            sudo('useradd -r nginx')
+            conn.sudo('useradd -r nginx')
 
-            sudo('mkdir -p /opt/datalab/templates')
-            put('/root/templates', '/opt/datalab', use_sudo=True)
-            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(edge_ip))
-            sudo('sed -i \'s|KEYCLOAK_AUTH_URL|{}|g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
+            conn.sudo('mkdir -p /opt/datalab/templates')
+            conn.put('/root/templates', '/opt/datalab', use_sudo=True)
+            conn.sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(edge_ip))
+            conn.sudo('sed -i \'s|KEYCLOAK_AUTH_URL|{}|g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_auth_server_url))
-            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
+            conn.sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_realm_name))
-            sudo('sed -i \'s/KEYCLOAK_CLIENT_ID/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
+            conn.sudo('sed -i \'s/KEYCLOAK_CLIENT_ID/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_client_id))
-            sudo('sed -i \'s/KEYCLOAK_CLIENT_SECRET/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
+            conn.sudo('sed -i \'s/KEYCLOAK_CLIENT_SECRET/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_client_secret))
 
-            sudo('cp /opt/datalab/templates/nginx.conf /usr/local/openresty/nginx/conf')
-            sudo('mkdir /usr/local/openresty/nginx/conf/conf.d')
-            sudo('cp /opt/datalab/templates/conf.d/proxy.conf /usr/local/openresty/nginx/conf/conf.d/')
-            sudo('mkdir /usr/local/openresty/nginx/conf/locations')
-            sudo('systemctl start openresty')
-            sudo('touch /tmp/nginx_installed')
+            conn.sudo('cp /opt/datalab/templates/nginx.conf /usr/local/openresty/nginx/conf')
+            conn.sudo('mkdir /usr/local/openresty/nginx/conf/conf.d')
+            conn.sudo('cp /opt/datalab/templates/conf.d/proxy.conf /usr/local/openresty/nginx/conf/conf.d/')
+            conn.sudo('mkdir /usr/local/openresty/nginx/conf/locations')
+            conn.sudo('systemctl start openresty')
+            conn.sudo('touch /tmp/nginx_installed')
             if os.environ['conf_letsencrypt_enabled'] == 'true':
                 print("Configuring letsencrypt certificates.")
                 install_certbot(os.environ['conf_os_family'])
@@ -170,21 +170,21 @@ def configure_nftables(config):
     try:
         if not exists('/tmp/nftables_ensured'):
             manage_pkg('-y install', 'remote', 'nftables')
-            sudo('systemctl enable nftables.service')
-            sudo('systemctl start nftables')
-            sudo('sysctl net.ipv4.ip_forward=1')
+            conn.sudo('systemctl enable nftables.service')
+            conn.sudo('systemctl start nftables')
+            conn.sudo('sysctl net.ipv4.ip_forward=1')
             if os.environ['conf_cloud_provider'] == 'aws':
                 interface = 'eth0'
             elif os.environ['conf_cloud_provider'] == 'gcp':
                 interface = 'ens4'
-            sudo('sed -i \'s/#net.ipv4.ip_forward=1/net.ipv4.ip_forward=1/g\' /etc/sysctl.conf')
-            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/nftables.conf'.format(config['edge_ip']))
-            sudo('sed -i "s|INTERFACE|{}|g" /opt/datalab/templates/nftables.conf'.format(interface))
-            sudo(
+            conn.sudo('sed -i \'s/#net.ipv4.ip_forward=1/net.ipv4.ip_forward=1/g\' /etc/sysctl.conf')
+            conn.sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/nftables.conf'.format(config['edge_ip']))
+            conn.sudo('sed -i "s|INTERFACE|{}|g" /opt/datalab/templates/nftables.conf'.format(interface))
+            conn.sudo(
                 'sed -i "s|SUBNET_CIDR|{}|g" /opt/datalab/templates/nftables.conf'.format(config['exploratory_subnet']))
-            sudo('cp /opt/datalab/templates/nftables.conf /etc/')
-            sudo('systemctl restart nftables')
-            sudo('touch /tmp/nftables_ensured')
+            conn.sudo('cp /opt/datalab/templates/nftables.conf /etc/')
+            conn.sudo('systemctl restart nftables')
+            conn.sudo('touch /tmp/nftables_ensured')
     except Exception as err:
         print("Failed to configure nftables: " + str(err))
         sys.exit(1)
\ No newline at end of file
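
Note: two Fabric 1 idioms in this file are not covered by the plain sudo/run/put rename: the bare "with cd('/tmp/src/'):" blocks and put(..., use_sudo=True). In Fabric 2, cd is a method on the connection, and Connection.put() has no use_sudo argument. A hedged sketch of both patterns; the paths come from the hunks above, but the workaround itself is an assumption rather than something this commit implements:

    # Illustrative handling of the cd() blocks and the use_sudo uploads (not part of this commit).
    from fabric import Connection

    def build_luarocks(conn: Connection):
        # Whether conn.cd() applies to sudo() depends on the fabric/invoke versions in use,
        # so spelling the cd out inside the command is the most portable form.
        conn.sudo('bash -c "cd /tmp/src/luarocks-3.3.1 && ./configure && make install"')

    def upload_unit_file(conn: Connection):
        # Connection.put() cannot sudo; stage the file in /tmp and move it with conn.sudo() instead.
        conn.put('/root/templates/step-cert-manager.service', '/tmp/step-cert-manager.service')
        conn.sudo('mv /tmp/step-cert-manager.service /etc/systemd/system/step-cert-manager.service')
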
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
index 94cbd78..931b204 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
@@ -36,14 +36,14 @@ def enable_proxy(proxy_host, proxy_port):
     try:
         proxy_string = "http://%s:%s" % (proxy_host, proxy_port)
         proxy_https_string = "http://%s:%s" % (proxy_host, proxy_port)
-        sudo('sed -i "/^export http_proxy/d" /etc/profile')
-        sudo('sed -i "/^export https_proxy/d" /etc/profile')
-        sudo('echo export http_proxy=' + proxy_string + ' >> /etc/profile')
-        sudo('echo export https_proxy=' + proxy_string + ' >> /etc/profile')
+        conn.sudo('sed -i "/^export http_proxy/d" /etc/profile')
+        conn.sudo('sed -i "/^export https_proxy/d" /etc/profile')
+        conn.sudo('echo export http_proxy=' + proxy_string + ' >> /etc/profile')
+        conn.sudo('echo export https_proxy=' + proxy_string + ' >> /etc/profile')
         if exists('/etc/apt/apt.conf'):
-            sudo("sed -i '/^Acquire::http::Proxy/d' /etc/apt/apt.conf")
-        sudo("echo 'Acquire::http::Proxy \"" + proxy_string + "\";' >> /etc/apt/apt.conf")
-        sudo("echo 'Acquire::http::Proxy \"" + proxy_https_string + "\";' >> /etc/apt/apt.conf")
+            conn.sudo("sed -i '/^Acquire::http::Proxy/d' /etc/apt/apt.conf")
+        conn.sudo("echo 'Acquire::http::Proxy \"" + proxy_string + "\";' >> /etc/apt/apt.conf")
+        conn.sudo("echo 'Acquire::http::Proxy \"" + proxy_https_string + "\";' >> /etc/apt/apt.conf")
 
         print("Renewing gpg key")
         renew_gpg_key()
@@ -54,27 +54,27 @@ def enable_proxy(proxy_host, proxy_port):
 def ensure_r_local_kernel(spark_version, os_user, templates_dir, kernels_dir):
     if not exists('/home/' + os_user + '/.ensure_dir/r_local_kernel_ensured'):
         try:
-            sudo('R -e "IRkernel::installspec()"')
-            r_version = sudo("R --version | awk '/version / {print $3}'")
-            put(templates_dir + 'r_template.json', '/tmp/r_template.json')
-            sudo('sed -i "s|R_VER|' + r_version + '|g" /tmp/r_template.json')
-            sudo('sed -i "s|SP_VER|' + spark_version + '|g" /tmp/r_template.json')
-            sudo('\cp -f /tmp/r_template.json {}/ir/kernel.json'.format(kernels_dir))
-            sudo('ln -s /opt/spark/ /usr/local/spark')
+            conn.sudo('R -e "IRkernel::installspec()"')
+            r_version = conn.sudo("R --version | awk '/version / {print $3}'")
+            conn.put(templates_dir + 'r_template.json', '/tmp/r_template.json')
+            conn.sudo('sed -i "s|R_VER|' + r_version + '|g" /tmp/r_template.json')
+            conn.sudo('sed -i "s|SP_VER|' + spark_version + '|g" /tmp/r_template.json')
+            conn.sudo('\cp -f /tmp/r_template.json {}/ir/kernel.json'.format(kernels_dir))
+            conn.sudo('ln -s /opt/spark/ /usr/local/spark')
             try:
-                sudo('cd /usr/local/spark/R/lib/SparkR; R -e "install.packages(\'roxygen2\',repos=\'https://cloud.r-project.org\')" R -e "devtools::check(\'.\')"')
+                conn.sudo('cd /usr/local/spark/R/lib/SparkR; R -e "install.packages(\'roxygen2\',repos=\'https://cloud.r-project.org\')" R -e "devtools::check(\'.\')"')
             except:
                 pass
-            sudo('cd /usr/local/spark/R/lib/SparkR; R -e "devtools::install(\'.\')"')
-            sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/.local')
-            sudo('touch /home/' + os_user + '/.ensure_dir/r_local_kernel_ensured')
+            conn.sudo('cd /usr/local/spark/R/lib/SparkR; R -e "devtools::install(\'.\')"')
+            conn.sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/.local')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/r_local_kernel_ensured')
         except:
             sys.exit(1)
 
 @backoff.on_exception(backoff.expo, SystemExit, max_tries=20)
 def add_marruter_key():
     try:
-        sudo('add-apt-repository -y ppa:marutter/rrutter')
+        conn.sudo('add-apt-repository -y ppa:marutter/rrutter')
     except:
         sys.exit(1)
 
@@ -86,16 +86,16 @@ def ensure_r(os_user, r_libs, region, r_mirror):
             else:
                 r_repository = 'https://cloud.r-project.org'
             #add_marruter_key()
-            sudo('apt update')
+            conn.sudo('apt update')
             manage_pkg('-yV install', 'remote', 'libssl-dev libcurl4-gnutls-dev libgit2-dev libxml2-dev libreadline-dev')
             manage_pkg('-y install', 'remote', 'cmake')
-            sudo('apt-key adv --keyserver keyserver.ubuntu.com --recv-keys E298A3A825C0D65DFD57CBB651716619E084DAB9')
-            sudo("add-apt-repository 'deb https://cloud.r-project.org/bin/linux/ubuntu focal-cran40/'")
+            conn.sudo('apt-key adv --keyserver keyserver.ubuntu.com --recv-keys E298A3A825C0D65DFD57CBB651716619E084DAB9')
+            conn.sudo("add-apt-repository 'deb https://cloud.r-project.org/bin/linux/ubuntu focal-cran40/'")
             manage_pkg('update', 'remote', '')
             manage_pkg('-y install', 'remote', 'r-base r-base-dev')
-            sudo('R CMD javareconf')
-            sudo('cd /root; git clone https://github.com/zeromq/zeromq4-x.git; cd zeromq4-x/; mkdir build; cd build; cmake ..; make install; ldconfig')
-            sudo('R -e "install.packages(\'devtools\',repos=\'{}\')"'.format(r_repository))
+            conn.sudo('R CMD javareconf')
+            conn.sudo('cd /root; git clone https://github.com/zeromq/zeromq4-x.git; cd zeromq4-x/; mkdir build; cd build; cmake ..; make install; ldconfig')
+            conn.sudo('R -e "install.packages(\'devtools\',repos=\'{}\')"'.format(r_repository))
             for i in r_libs:
                 if '=' in i:
                     name = i.split('=')[0]
@@ -103,17 +103,17 @@ def ensure_r(os_user, r_libs, region, r_mirror):
                 else:
                     name = i
                     vers = ''
-                sudo('R -e \'devtools::install_version("{}", version = {}, repos ="{}", dependencies = NA)\''.format(name, vers, r_repository))
+                conn.sudo('R -e \'devtools::install_version("{}", version = {}, repos ="{}", dependencies = NA)\''.format(name, vers, r_repository))
                 #sudo('R -e "install.packages(\'{}\',repos=\'{}\')"'.format(i, r_repository))
-            sudo('R -e "library(\'devtools\');install.packages(repos=\'{}\',c(\'rzmq\',\'repr\',\'digest\',\'stringr\',\'RJSONIO\',\'functional\',\'plyr\'))"'.format(r_repository))
+            conn.sudo('R -e "library(\'devtools\');install.packages(repos=\'{}\',c(\'rzmq\',\'repr\',\'digest\',\'stringr\',\'RJSONIO\',\'functional\',\'plyr\'))"'.format(r_repository))
             try:
-                sudo('R -e "library(\'devtools\');install_github(\'IRkernel/repr\');install_github(\'IRkernel/IRdisplay\');install_github(\'IRkernel/IRkernel\');"')
+                conn.sudo('R -e "library(\'devtools\');install_github(\'IRkernel/repr\');install_github(\'IRkernel/IRdisplay\');install_github(\'IRkernel/IRkernel\');"')
             except:
-                sudo('R -e "options(download.file.method = "wget");library(\'devtools\');install_github(\'IRkernel/repr\');install_github(\'IRkernel/IRdisplay\');install_github(\'IRkernel/IRkernel\');"')
+                conn.sudo('R -e "options(download.file.method = "wget");library(\'devtools\');install_github(\'IRkernel/repr\');install_github(\'IRkernel/IRdisplay\');install_github(\'IRkernel/IRkernel\');"')
             if os.environ['application'] == 'tensor-rstudio':
-                sudo('R -e "library(\'devtools\');install_version(\'keras\', version = \'{}\', repos = \'{}\');"'.format(os.environ['notebook_keras_version'],r_repository))
-            sudo('R -e "install.packages(\'RJDBC\',repos=\'{}\',dep=TRUE)"'.format(r_repository))
-            sudo('touch /home/' + os_user + '/.ensure_dir/r_ensured')
+                conn.sudo('R -e "library(\'devtools\');install_version(\'keras\', version = \'{}\', repos = \'{}\');"'.format(os.environ['notebook_keras_version'],r_repository))
+            conn.sudo('R -e "install.packages(\'RJDBC\',repos=\'{}\',dep=TRUE)"'.format(r_repository))
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/r_ensured')
         except:
             sys.exit(1)
 
@@ -123,46 +123,46 @@ def install_rstudio(os_user, local_spark_path, rstudio_pass, rstudio_version):
         try:
             manage_pkg('-y install', 'remote', 'r-base')
             manage_pkg('-y install', 'remote', 'gdebi-core')
-            sudo('wget https://download2.rstudio.org/server/bionic/amd64/rstudio-server-{}-amd64.deb'.format(rstudio_version))
-            sudo('gdebi -n rstudio-server-{}-amd64.deb'.format(rstudio_version))
-            sudo('mkdir -p /mnt/var')
-            sudo('chown {0}:{0} /mnt/var'.format(os_user))
-            http_proxy = run('echo $http_proxy')
-            https_proxy = run('echo $https_proxy')
-            sudo("sed -i '/Type=forking/a \Environment=USER=datalab-user' /lib/systemd/system/rstudio-server.service")
-            sudo(
+            conn.sudo('wget https://download2.rstudio.org/server/bionic/amd64/rstudio-server-{}-amd64.deb'.format(rstudio_version))
+            conn.sudo('gdebi -n rstudio-server-{}-amd64.deb'.format(rstudio_version))
+            conn.sudo('mkdir -p /mnt/var')
+            conn.sudo('chown {0}:{0} /mnt/var'.format(os_user))
+            http_proxy = conn.run('echo $http_proxy')
+            https_proxy = conn.run('echo $https_proxy')
+            conn.sudo("sed -i '/Type=forking/a \Environment=USER=datalab-user' /lib/systemd/system/rstudio-server.service")
+            conn.sudo(
                 "sed -i '/ExecStart/s|=/usr/lib/rstudio-server/bin/rserver|=/bin/bash -c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; /usr/lib/rstudio-server/bin/rserver --auth-none 1|g' /lib/systemd/system/rstudio-server.service")
-            sudo("sed -i '/ExecStart/s|$|\"|g' /lib/systemd/system/rstudio-server.service")
-            sudo(
+            conn.sudo("sed -i '/ExecStart/s|$|\"|g' /lib/systemd/system/rstudio-server.service")
+            conn.sudo(
                 'sed -i \'/\[Service\]/a Environment=\"HTTP_PROXY={}\"\'  /lib/systemd/system/rstudio-server.service'.format(
                     http_proxy))
-            sudo(
+            conn.sudo(
                 'sed -i \'/\[Service\]/a Environment=\"HTTPS_PROXY={}\"\'  /lib/systemd/system/rstudio-server.service'.format(
                     https_proxy))
-            java_home = run("update-alternatives --query java | grep -o \'/.*/java-8.*/jre\'").splitlines()[0]
-            sudo('sed -i \'/\[Service\]/ a\Environment=\"JAVA_HOME={}\"\'  /lib/systemd/system/rstudio-server.service'.format(
+            java_home = conn.run("update-alternatives --query java | grep -o \'/.*/java-8.*/jre\'").splitlines()[0]
+            conn.sudo('sed -i \'/\[Service\]/ a\Environment=\"JAVA_HOME={}\"\'  /lib/systemd/system/rstudio-server.service'.format(
                 java_home))
-            sudo("systemctl daemon-reload")
-            sudo('touch /home/{}/.Renviron'.format(os_user))
-            sudo('chown {0}:{0} /home/{0}/.Renviron'.format(os_user))
-            sudo('''echo 'SPARK_HOME="{0}"' >> /home/{1}/.Renviron'''.format(local_spark_path, os_user))
-            sudo('''echo 'JAVA_HOME="{0}"' >> /home/{1}/.Renviron'''.format(java_home, os_user))
-            sudo('touch /home/{}/.Rprofile'.format(os_user))
-            sudo('chown {0}:{0} /home/{0}/.Rprofile'.format(os_user))
-            sudo('''echo 'library(SparkR, lib.loc = c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib")))' >> /home/{}/.Rprofile'''.format(os_user))
-            sudo('''echo 'Sys.setenv(http_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(http_proxy, os_user))
-            sudo('''echo 'Sys.setenv(https_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(https_proxy, os_user))
-            sudo('rstudio-server start')
-            sudo('echo "{0}:{1}" | chpasswd'.format(os_user, rstudio_pass))
+            conn.sudo("systemctl daemon-reload")
+            conn.sudo('touch /home/{}/.Renviron'.format(os_user))
+            conn.sudo('chown {0}:{0} /home/{0}/.Renviron'.format(os_user))
+            conn.sudo('''echo 'SPARK_HOME="{0}"' >> /home/{1}/.Renviron'''.format(local_spark_path, os_user))
+            conn.sudo('''echo 'JAVA_HOME="{0}"' >> /home/{1}/.Renviron'''.format(java_home, os_user))
+            conn.sudo('touch /home/{}/.Rprofile'.format(os_user))
+            conn.sudo('chown {0}:{0} /home/{0}/.Rprofile'.format(os_user))
+            conn.sudo('''echo 'library(SparkR, lib.loc = c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib")))' >> /home/{}/.Rprofile'''.format(os_user))
+            conn.sudo('''echo 'Sys.setenv(http_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(http_proxy, os_user))
+            conn.sudo('''echo 'Sys.setenv(https_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(https_proxy, os_user))
+            conn.sudo('rstudio-server start')
+            conn.sudo('echo "{0}:{1}" | chpasswd'.format(os_user, rstudio_pass))
             #sudo("sed -i '/exit 0/d' /etc/rc.local")
             #sudo('''bash -c "echo \'sed -i 's/^#SPARK_HOME/SPARK_HOME/' /home/{}/.Renviron\' >> /etc/rc.local"'''.format(os_user))
             #sudo("bash -c 'echo exit 0 >> /etc/rc.local'")
-            sudo('touch /home/{}/.ensure_dir/rstudio_ensured'.format(os_user))
+            conn.sudo('touch /home/{}/.ensure_dir/rstudio_ensured'.format(os_user))
         except:
             sys.exit(1)
     else:
         try:
-            sudo('echo "{0}:{1}" | chpasswd'.format(os_user, rstudio_pass))
+            conn.sudo('echo "{0}:{1}" | chpasswd'.format(os_user, rstudio_pass))
         except:
             sys.exit(1)
 
@@ -170,31 +170,31 @@ def install_rstudio(os_user, local_spark_path, rstudio_pass, rstudio_version):
 def ensure_matplot(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/matplot_ensured'):
         try:
-            sudo("sudo sed -i~orig -e 's/# deb-src/deb-src/' /etc/apt/sources.list")
+            conn.sudo("sudo sed -i~orig -e 's/# deb-src/deb-src/' /etc/apt/sources.list")
             manage_pkg('update', 'remote', '')
             manage_pkg('-y build-dep', 'remote', 'python3-matplotlib')
-            sudo('pip3 install matplotlib==2.0.2 --no-cache-dir')
+            conn.sudo('pip3 install matplotlib==2.0.2 --no-cache-dir')
             if os.environ['application'] in ('tensor', 'deeplearning'):
-                sudo('python3.8 -m pip install -U numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
-            sudo('touch /home/' + os_user + '/.ensure_dir/matplot_ensured')
+                conn.sudo('python3.8 -m pip install -U numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/matplot_ensured')
         except:
             sys.exit(1)
 
 @backoff.on_exception(backoff.expo, SystemExit, max_tries=10)
 def add_sbt_key():
-    sudo(
+    conn.sudo(
         'curl -sL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x2EE0EA64E40A89B84B2DF73499E82A75642AC823" | sudo apt-key add')
 
 def ensure_sbt(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/sbt_ensured'):
         try:
             manage_pkg('-y install', 'remote', 'apt-transport-https')
-            sudo('echo "deb https://dl.bintray.com/sbt/debian /" | sudo tee -a /etc/apt/sources.list.d/sbt.list')
+            conn.sudo('echo "deb https://dl.bintray.com/sbt/debian /" | sudo tee -a /etc/apt/sources.list.d/sbt.list')
 
             add_sbt_key()
             manage_pkg('update', 'remote', '')
             manage_pkg('-y install', 'remote', 'sbt')
-            sudo('touch /home/' + os_user + '/.ensure_dir/sbt_ensured')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/sbt_ensured')
         except:
             sys.exit(1)
 
@@ -202,9 +202,9 @@ def ensure_sbt(os_user):
 def ensure_scala(scala_link, scala_version, os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/scala_ensured'):
         try:
-            sudo('wget {}scala-{}.deb -O /tmp/scala.deb'.format(scala_link, scala_version))
-            sudo('dpkg -i /tmp/scala.deb')
-            sudo('touch /home/' + os_user + '/.ensure_dir/scala_ensured')
+            conn.sudo('wget {}scala-{}.deb -O /tmp/scala.deb'.format(scala_link, scala_version))
+            conn.sudo('dpkg -i /tmp/scala.deb')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/scala_ensured')
         except:
             sys.exit(1)
 
@@ -216,7 +216,7 @@ def ensure_jre_jdk(os_user):
             manage_pkg('-y install', 'remote', 'default-jdk')
             manage_pkg('-y install', 'remote', 'openjdk-8-jdk')
             manage_pkg('-y install', 'remote', 'openjdk-8-jre')
-            sudo('touch /home/' + os_user + '/.ensure_dir/jre_jdk_ensured')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/jre_jdk_ensured')
         except:
             sys.exit(1)
 
@@ -226,10 +226,10 @@ def ensure_additional_python_libs(os_user):
         try:
             manage_pkg('-y install', 'remote', 'libjpeg8-dev zlib1g-dev')
             if os.environ['application'] in ('jupyter', 'zeppelin'):
-                sudo('pip3 install NumPy=={} SciPy pandas Sympy Pillow sklearn --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+                conn.sudo('pip3 install NumPy=={} SciPy pandas Sympy Pillow sklearn --no-cache-dir'.format(os.environ['notebook_numpy_version']))
             if os.environ['application'] in ('tensor', 'deeplearning'):
-                sudo('pip3 install opencv-python h5py --no-cache-dir')
-            sudo('touch /home/' + os_user + '/.ensure_dir/additional_python_libs_ensured')
+                conn.sudo('pip3 install opencv-python h5py --no-cache-dir')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/additional_python_libs_ensured')
         except:
             sys.exit(1)
 
@@ -239,9 +239,9 @@ def ensure_python3_specific_version(python3_version, os_user):
         try:
             if len(python3_version) < 4:
                 python3_version = python3_version + ".0"
-            sudo('wget https://www.python.org/ftp/python/{0}/Python-{0}.tgz'.format(python3_version))
-            sudo('tar xzf Python-{0}.tgz; cd Python-{0}; ./configure --prefix=/usr/local; make altinstall'.format(python3_version))
-            sudo('touch /home/' + os_user + '/.ensure_dir/python3_specific_version_ensured')
+            conn.sudo('wget https://www.python.org/ftp/python/{0}/Python-{0}.tgz'.format(python3_version))
+            conn.sudo('tar xzf Python-{0}.tgz; cd Python-{0}; ./configure --prefix=/usr/local; make altinstall'.format(python3_version))
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/python3_specific_version_ensured')
         except:
             sys.exit(1)
 
@@ -251,18 +251,18 @@ def ensure_python3_libraries(os_user):
             #manage_pkg('-y install', 'remote', 'python3-setuptools')
             manage_pkg('-y install', 'remote', 'python3-pip')
             manage_pkg('-y install', 'remote', 'libkrb5-dev')
-            sudo('pip3 install -U keyrings.alt backoff')
-            sudo('pip3 install setuptools=={}'.format(os.environ['notebook_setuptools_version']))
+            conn.sudo('pip3 install -U keyrings.alt backoff')
+            conn.sudo('pip3 install setuptools=={}'.format(os.environ['notebook_setuptools_version']))
             try:
-                sudo('pip3 install tornado=={0} ipython==7.9.0 ipykernel=={1} sparkmagic --no-cache-dir' \
+                conn.sudo('pip3 install tornado=={0} ipython==7.9.0 ipykernel=={1} sparkmagic --no-cache-dir' \
                      .format(os.environ['notebook_tornado_version'], os.environ['notebook_ipykernel_version']))
             except:
-                sudo('pip3 install tornado=={0} ipython==5.0.0 ipykernel=={1} sparkmagic --no-cache-dir' \
+                conn.sudo('pip3 install tornado=={0} ipython==5.0.0 ipykernel=={1} sparkmagic --no-cache-dir' \
                      .format(os.environ['notebook_tornado_version'], os.environ['notebook_ipykernel_version']))
-            sudo('pip3 install -U pip=={} --no-cache-dir'.format(os.environ['conf_pip_version']))
-            sudo('pip3 install boto3 --no-cache-dir')
-            sudo('pip3 install fabvenv fabric-virtualenv future --no-cache-dir')
-            sudo('touch /home/' + os_user + '/.ensure_dir/python3_libraries_ensured')
+            conn.sudo('pip3 install -U pip=={} --no-cache-dir'.format(os.environ['conf_pip_version']))
+            conn.sudo('pip3 install boto3 --no-cache-dir')
+            conn.sudo('pip3 install fabvenv fabric-virtualenv future --no-cache-dir')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/python3_libraries_ensured')
         except:
             sys.exit(1)
 
@@ -273,9 +273,9 @@ def install_tensor(os_user, cuda_version, cuda_file_name,
     if not exists('/home/{}/.ensure_dir/tensor_ensured'.format(os_user)):
         try:
             # install nvidia drivers
-            sudo('echo "blacklist nouveau" >> /etc/modprobe.d/blacklist-nouveau.conf')
-            sudo('echo "options nouveau modeset=0" >> /etc/modprobe.d/blacklist-nouveau.conf')
-            sudo('update-initramfs -u')
+            conn.sudo('echo "blacklist nouveau" >> /etc/modprobe.d/blacklist-nouveau.conf')
+            conn.sudo('echo "options nouveau modeset=0" >> /etc/modprobe.d/blacklist-nouveau.conf')
+            conn.sudo('update-initramfs -u')
             with settings(warn_only=True):
                 reboot(wait=180)
             manage_pkg('-y install', 'remote', 'dkms libglvnd-dev')
@@ -284,50 +284,50 @@ def install_tensor(os_user, cuda_version, cuda_file_name,
                 manage_pkg('-y install', 'remote', 'linux-modules-`uname -r`')
             else:
                 # legacy support for old kernels
-                sudo('if [[ $(apt-cache search linux-image-`uname -r`) ]]; then apt-get -y '
+                conn.sudo('if [[ $(apt-cache search linux-image-`uname -r`) ]]; then apt-get -y '
                      'install linux-image-`uname -r`; else apt-get -y install linux-modules-`uname -r`; fi;')
-            sudo('wget http://us.download.nvidia.com/tesla/{0}/NVIDIA-Linux-x86_64-{0}.run -O '
+            conn.sudo('wget http://us.download.nvidia.com/tesla/{0}/NVIDIA-Linux-x86_64-{0}.run -O '
                  '/home/{1}/NVIDIA-Linux-x86_64-{0}.run'.format(nvidia_version, os_user))
-            sudo('/bin/bash /home/{0}/NVIDIA-Linux-x86_64-{1}.run -s --dkms'.format(os_user, nvidia_version))
-            sudo('rm -f /home/{0}/NVIDIA-Linux-x86_64-{1}.run'.format(os_user, nvidia_version))
+            conn.sudo('/bin/bash /home/{0}/NVIDIA-Linux-x86_64-{1}.run -s --dkms'.format(os_user, nvidia_version))
+            conn.sudo('rm -f /home/{0}/NVIDIA-Linux-x86_64-{1}.run'.format(os_user, nvidia_version))
             # install cuda
-            sudo('python3 -m pip install --upgrade pip=={0} wheel numpy=={1} --no-cache-dir'.format(
+            conn.sudo('python3 -m pip install --upgrade pip=={0} wheel numpy=={1} --no-cache-dir'.format(
                 os.environ['conf_pip_version'], os.environ['notebook_numpy_version']))
-            sudo('wget -P /opt http://developer.download.nvidia.com/compute/cuda/{0}/Prod/local_installers/{1}'.format(
+            conn.sudo('wget -P /opt http://developer.download.nvidia.com/compute/cuda/{0}/Prod/local_installers/{1}'.format(
                 cuda_version, cuda_file_name))
-            sudo('sh /opt/{} --silent --toolkit'.format(cuda_file_name))
-            sudo('mv /usr/local/cuda-{} /opt/'.format(cuda_version))
-            sudo('ln -s /opt/cuda-{0} /usr/local/cuda-{0}'.format(cuda_version))
-            sudo('rm -f /opt/{}'.format(cuda_file_name))
+            conn.sudo('sh /opt/{} --silent --toolkit'.format(cuda_file_name))
+            conn.sudo('mv /usr/local/cuda-{} /opt/'.format(cuda_version))
+            conn.sudo('ln -s /opt/cuda-{0} /usr/local/cuda-{0}'.format(cuda_version))
+            conn.sudo('rm -f /opt/{}'.format(cuda_file_name))
             # install cuDNN
             run('wget http://developer.download.nvidia.com/compute/redist/cudnn/v{0}/{1} -O /tmp/{1}'.format(
                 cudnn_version, cudnn_file_name))
             run('tar xvzf /tmp/{} -C /tmp'.format(cudnn_file_name))
-            sudo('mkdir -p /opt/cudnn/include')
-            sudo('mkdir -p /opt/cudnn/lib64')
-            sudo('mv /tmp/cuda/include/cudnn.h /opt/cudnn/include')
-            sudo('mv /tmp/cuda/lib64/libcudnn* /opt/cudnn/lib64')
-            sudo('chmod a+r /opt/cudnn/include/cudnn.h /opt/cudnn/lib64/libcudnn*')
+            conn.sudo('mkdir -p /opt/cudnn/include')
+            conn.sudo('mkdir -p /opt/cudnn/lib64')
+            conn.sudo('mv /tmp/cuda/include/cudnn.h /opt/cudnn/include')
+            conn.sudo('mv /tmp/cuda/lib64/libcudnn* /opt/cudnn/lib64')
+            conn.sudo('chmod a+r /opt/cudnn/include/cudnn.h /opt/cudnn/lib64/libcudnn*')
             run(
                 'echo "export LD_LIBRARY_PATH=\"$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64\"" >> ~/.bashrc')
             # install TensorFlow and run TensorBoard
-            # sudo('python2.7 -m pip install --upgrade https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-{}-cp27-none-linux_x86_64.whl --no-cache-dir'.format(tensorflow_version))
-            sudo('python3 -m pip install --upgrade tensorflow-gpu=={} --no-cache-dir'.format(tensorflow_version))
-            sudo('mkdir /var/log/tensorboard; chown {0}:{0} -R /var/log/tensorboard'.format(os_user))
-            put('{}tensorboard.service'.format(templates_dir), '/tmp/tensorboard.service')
-            sudo("sed -i 's|OS_USR|{}|' /tmp/tensorboard.service".format(os_user))
+            # conn.sudo('python2.7 -m pip install --upgrade https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-{}-cp27-none-linux_x86_64.whl --no-cache-dir'.format(tensorflow_version))
+            conn.sudo('python3 -m pip install --upgrade tensorflow-gpu=={} --no-cache-dir'.format(tensorflow_version))
+            conn.sudo('mkdir /var/log/tensorboard; chown {0}:{0} -R /var/log/tensorboard'.format(os_user))
+            conn.put('{}tensorboard.service'.format(templates_dir), '/tmp/tensorboard.service')
+            conn.sudo("sed -i 's|OS_USR|{}|' /tmp/tensorboard.service".format(os_user))
             http_proxy = run('echo $http_proxy')
             https_proxy = run('echo $https_proxy')
-            sudo('sed -i \'/\[Service\]/ a\Environment=\"HTTP_PROXY={}\"\'  /tmp/tensorboard.service'.format(
+            conn.sudo('sed -i \'/\[Service\]/ a\Environment=\"HTTP_PROXY={}\"\'  /tmp/tensorboard.service'.format(
                 http_proxy))
-            sudo('sed -i \'/\[Service\]/ a\Environment=\"HTTPS_PROXY={}\"\'  /tmp/tensorboard.service'.format(
+            conn.sudo('sed -i \'/\[Service\]/ a\Environment=\"HTTPS_PROXY={}\"\'  /tmp/tensorboard.service'.format(
                 https_proxy))
-            sudo("chmod 644 /tmp/tensorboard.service")
-            sudo('\cp /tmp/tensorboard.service /etc/systemd/system/')
-            sudo("systemctl daemon-reload")
-            sudo("systemctl enable tensorboard")
-            sudo("systemctl start tensorboard")
-            sudo('touch /home/{}/.ensure_dir/tensor_ensured'.format(os_user))
+            conn.sudo("chmod 644 /tmp/tensorboard.service")
+            conn.sudo('\cp /tmp/tensorboard.service /etc/systemd/system/')
+            conn.sudo("systemctl daemon-reload")
+            conn.sudo("systemctl enable tensorboard")
+            conn.sudo("systemctl start tensorboard")
+            conn.sudo('touch /home/{}/.ensure_dir/tensor_ensured'.format(os_user))
 
         except:
             sys.exit(1)
@@ -336,20 +336,20 @@ def install_tensor(os_user, cuda_version, cuda_file_name,
 def install_maven(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/maven_ensured'):
         manage_pkg('-y install', 'remote', 'maven')
-        sudo('touch /home/' + os_user + '/.ensure_dir/maven_ensured')
+        conn.sudo('touch /home/' + os_user + '/.ensure_dir/maven_ensured')
 
 def install_gcloud(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/gcloud_ensured'):
-        sudo('echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | sudo tee -a /etc/apt/sources.list.d/google-cloud-sdk.list')
-        sudo('curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key --keyring /usr/share/keyrings/cloud.google.gpg add -')
+        conn.sudo('echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | sudo tee -a /etc/apt/sources.list.d/google-cloud-sdk.list')
+        conn.sudo('curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo apt-key --keyring /usr/share/keyrings/cloud.google.gpg add -')
         manage_pkg('-y install', 'remote', 'google-cloud-sdk')
-        sudo('touch /home/' + os_user + '/.ensure_dir/gcloud_ensured')
+        conn.sudo('touch /home/' + os_user + '/.ensure_dir/gcloud_ensured')
 
 def install_livy_dependencies(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/livy_dependencies_ensured'):
         manage_pkg('-y install', 'remote', 'libkrb5-dev')
-        sudo('pip3 install cloudpickle requests requests-kerberos flake8 flaky pytest --no-cache-dir')
-        sudo('touch /home/' + os_user + '/.ensure_dir/livy_dependencies_ensured')
+        conn.sudo('pip3 install cloudpickle requests requests-kerberos flake8 flaky pytest --no-cache-dir')
+        conn.sudo('touch /home/' + os_user + '/.ensure_dir/livy_dependencies_ensured')
 
 
 def install_maven_emr(os_user):
@@ -367,9 +367,9 @@ def install_livy_dependencies_emr(os_user):
 
 def install_nodejs(os_user):
     if not exists('/home/{}/.ensure_dir/nodejs_ensured'.format(os_user)):
-        sudo('curl -sL https://deb.nodesource.com/setup_13.x | sudo -E bash -')
+        conn.sudo('curl -sL https://deb.nodesource.com/setup_13.x | sudo -E bash -')
         manage_pkg('-y install', 'remote', 'nodejs')
-        sudo('touch /home/{}/.ensure_dir/nodejs_ensured'.format(os_user))
+        conn.sudo('touch /home/{}/.ensure_dir/nodejs_ensured'.format(os_user))
 
 
 def install_os_pkg(requisites):
@@ -387,12 +387,12 @@ def install_os_pkg(requisites):
             else:
                 version = 'N/A'
                 os_pkg = os_pkg[0]
-            sudo('DEBIAN_FRONTEND=noninteractive apt-get -y install --allow-downgrades {0} 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E "({1})" /tmp/tee.tmp > '
+            conn.sudo('DEBIAN_FRONTEND=noninteractive apt-get -y install --allow-downgrades {0} 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E "({1})" /tmp/tee.tmp > '
                  '/tmp/os_install_{2}.log; then echo "" > /tmp/os_install_{2}.log;fi'.format(os_pkg, error_parser, name))
-            err = sudo('cat /tmp/os_install_{}.log'.format(name)).replace('"', "'")
-            sudo('cat /tmp/tee.tmp | if ! grep -w -E -A 30 "({1})" /tmp/tee.tmp > '
+            err = conn.sudo('cat /tmp/os_install_{}.log'.format(name)).replace('"', "'")
+            conn.sudo('cat /tmp/tee.tmp | if ! grep -w -E -A 30 "({1})" /tmp/tee.tmp > '
                  '/tmp/os_install_{0}.log; then echo "" > /tmp/os_install_{0}.log;fi'.format(name, new_pkgs_parser))
-            dep = sudo('cat /tmp/os_install_{}.log'.format(name))
+            dep = conn.sudo('cat /tmp/os_install_{}.log'.format(name))
             if dep == '':
                 dep = []
             else:
@@ -402,13 +402,13 @@ def install_os_pkg(requisites):
                     if i == name:
                         dep[n] = ''
                     else:
-                        sudo('apt show {0} 2>&1 | if ! grep Version: > '
+                        conn.sudo('apt show {0} 2>&1 | if ! grep Version: > '
                  '/tmp/os_install_{0}.log; then echo "" > /tmp/os_install_{0}.log;fi'.format(i))
                         dep[n] =sudo('cat /tmp/os_install_{}.log'.format(i)).replace('Version: ', '{} v.'.format(i))
                 dep = [i for i in dep if i]
             versions = []
-            sudo('apt list --installed | if ! grep {0}/ > /tmp/os_install_{0}.list; then  echo "" > /tmp/os_install_{0}.list;fi'.format(name))
-            res = sudo('cat /tmp/os_install_{}.list'.format(name))
+            conn.sudo('apt list --installed | if ! grep {0}/ > /tmp/os_install_{0}.list; then  echo "" > /tmp/os_install_{0}.list;fi'.format(name))
+            res = conn.sudo('cat /tmp/os_install_{}.list'.format(name))
             if err:
                 status_msg = 'installation_error'
                 if 'E: Unable to locate package {}'.format(name) in err:
@@ -425,8 +425,8 @@ def install_os_pkg(requisites):
                     status_msg = 'invalid_version'
             status.append({"group": "os_pkg", "name": name, "version": version, "status": status_msg,
                            "error_message": err, "add_pkgs": dep, "available_versions": versions})
-        sudo('unattended-upgrades -v')
-        sudo('export LC_ALL=C')
+        conn.sudo('unattended-upgrades -v')
+        conn.sudo('export LC_ALL=C')
         return status
     except Exception as err:
         for os_pkg in requisites:
@@ -440,7 +440,7 @@ def install_os_pkg(requisites):
 @backoff.on_exception(backoff.expo, SystemExit, max_tries=10)
 def remove_os_pkg(pkgs):
     try:
-        sudo('apt remove --purge -y {}'.format(' '.join(pkgs)))
+        conn.sudo('apt remove --purge -y {}'.format(' '.join(pkgs)))
     except:
         sys.exit(1)
 
@@ -450,7 +450,7 @@ def get_available_os_pkgs():
         os_pkgs = dict()
         ansi_escape = re.compile(r'\x1b[^m]*m')
         manage_pkg('update', 'remote', '')
-        apt_raw = sudo("apt list")
+        apt_raw = conn.sudo("apt list")
         apt_list = ansi_escape.sub('', apt_raw).split("\r\n")
         for pkg in apt_list:
             if "/" in pkg:
@@ -466,52 +466,52 @@ def install_caffe2(os_user, caffe2_version, cmake_version):
         manage_pkg('update', 'remote', '')
         manage_pkg('-y install --no-install-recommends', 'remote', 'build-essential cmake git libgoogle-glog-dev '
                    'libprotobuf-dev protobuf-compiler python3-dev python3-pip')
-        sudo('pip3 install numpy=={} protobuf --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+        conn.sudo('pip3 install numpy=={} protobuf --no-cache-dir'.format(os.environ['notebook_numpy_version']))
         manage_pkg('-y install --no-install-recommends', 'remote', 'libgflags-dev')
         manage_pkg('-y install --no-install-recommends', 'remote', 'libgtest-dev libiomp-dev libleveldb-dev liblmdb-dev '
                    'libopencv-dev libopenmpi-dev libsnappy-dev openmpi-bin openmpi-doc python-pydot')
-        sudo('pip3 install flask graphviz hypothesis jupyter matplotlib==2.0.2 pydot python-nvd3 pyyaml requests scikit-image '
+        conn.sudo('pip3 install flask graphviz hypothesis jupyter matplotlib==2.0.2 pydot python-nvd3 pyyaml requests scikit-image '
              'scipy setuptools tornado --no-cache-dir')
-        sudo('cp -f /opt/cudnn/include/* /opt/cuda-{}/include/'.format(os.environ['notebook_cuda_version']))
-        sudo('cp -f /opt/cudnn/lib64/* /opt/cuda-{}/lib64/'.format(os.environ['notebook_cuda_version']))
-        sudo('wget https://cmake.org/files/v{2}/cmake-{1}.tar.gz -O /home/{0}/cmake-{1}.tar.gz'.format(
+        conn.sudo('cp -f /opt/cudnn/include/* /opt/cuda-{}/include/'.format(os.environ['notebook_cuda_version']))
+        conn.sudo('cp -f /opt/cudnn/lib64/* /opt/cuda-{}/lib64/'.format(os.environ['notebook_cuda_version']))
+        conn.sudo('wget https://cmake.org/files/v{2}/cmake-{1}.tar.gz -O /home/{0}/cmake-{1}.tar.gz'.format(
             os_user, cmake_version, cmake_version.split('.')[0] + "." + cmake_version.split('.')[1]))
-        sudo('tar -zxvf cmake-{}.tar.gz'.format(cmake_version))
+        conn.sudo('tar -zxvf cmake-{}.tar.gz'.format(cmake_version))
         with cd('/home/{}/cmake-{}/'.format(os_user, cmake_version)):
-            sudo('./bootstrap --prefix=/usr/local && make && make install')
-        sudo('ln -s /usr/local/bin/cmake /bin/cmake{}'.format(cmake_version))
-        sudo('git clone https://github.com/pytorch/pytorch.git')
+            conn.sudo('./bootstrap --prefix=/usr/local && make && make install')
+        conn.sudo('ln -s /usr/local/bin/cmake /bin/cmake{}'.format(cmake_version))
+        conn.sudo('git clone https://github.com/pytorch/pytorch.git')
         with cd('/home/{}/pytorch/'.format(os_user)):
-            sudo('git submodule update --init')
+            conn.sudo('git submodule update --init')
             with settings(warn_only=True):
-                sudo('git checkout {}'.format(os.environ['notebook_pytorch_branch']))
-                sudo('git submodule update --init --recursive')
-            sudo('python3 setup.py install')
-        sudo('touch /home/' + os_user + '/.ensure_dir/caffe2_ensured')
+                conn.sudo('git checkout {}'.format(os.environ['notebook_pytorch_branch']))
+                conn.sudo('git submodule update --init --recursive')
+            conn.sudo('python3 setup.py install')
+        conn.sudo('touch /home/' + os_user + '/.ensure_dir/caffe2_ensured')
 
 
 def install_cntk(os_user, cntk_version):
     if not exists('/home/{}/.ensure_dir/cntk_ensured'.format(os_user)):
-        sudo('pip3 install cntk-gpu=={} --no-cache-dir'.format(cntk_version))
-        sudo('touch /home/{}/.ensure_dir/cntk_ensured'.format(os_user))
+        conn.sudo('pip3 install cntk-gpu=={} --no-cache-dir'.format(cntk_version))
+        conn.sudo('touch /home/{}/.ensure_dir/cntk_ensured'.format(os_user))
 
 
 def install_keras(os_user, keras_version):
     if not exists('/home/{}/.ensure_dir/keras_ensured'.format(os_user)):
-        sudo('pip3 install keras=={} --no-cache-dir'.format(keras_version))
-        sudo('touch /home/{}/.ensure_dir/keras_ensured'.format(os_user))
+        conn.sudo('pip3 install keras=={} --no-cache-dir'.format(keras_version))
+        conn.sudo('touch /home/{}/.ensure_dir/keras_ensured'.format(os_user))
 
 
 def install_theano(os_user, theano_version):
     if not exists('/home/{}/.ensure_dir/theano_ensured'.format(os_user)):
-        sudo('python3 -m pip install Theano=={} --no-cache-dir'.format(theano_version))
-        sudo('touch /home/{}/.ensure_dir/theano_ensured'.format(os_user))
+        conn.sudo('python3 -m pip install Theano=={} --no-cache-dir'.format(theano_version))
+        conn.sudo('touch /home/{}/.ensure_dir/theano_ensured'.format(os_user))
 
 
 def install_mxnet(os_user, mxnet_version):
     if not exists('/home/{}/.ensure_dir/mxnet_ensured'.format(os_user)):
-        sudo('pip3 install mxnet-cu101=={} opencv-python --no-cache-dir'.format(mxnet_version))
-        sudo('touch /home/{}/.ensure_dir/mxnet_ensured'.format(os_user))
+        conn.sudo('pip3 install mxnet-cu101=={} opencv-python --no-cache-dir'.format(mxnet_version))
+        conn.sudo('touch /home/{}/.ensure_dir/mxnet_ensured'.format(os_user))
 
 
 #def install_torch(os_user):
@@ -521,11 +521,11 @@ def install_mxnet(os_user, mxnet_version):
 #           run('bash install-deps;')
 #           run('./install.sh -b')
 #        run('source /home/{}/.bashrc'.format(os_user))
-#        sudo('touch /home/{}/.ensure_dir/torch_ensured'.format(os_user))
+#        conn.sudo('touch /home/{}/.ensure_dir/torch_ensured'.format(os_user))
 
 
 def install_gitlab_cert(os_user, certfile):
     try:
-        sudo('mv -f /home/{0}/{1} /etc/ssl/certs/{1}'.format(os_user, certfile))
+        conn.sudo('mv -f /home/{0}/{1} /etc/ssl/certs/{1}'.format(os_user, certfile))
     except Exception as err:
         print('Failed to install gitlab certificate. {}'.format(str(err)))
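
For context, every conn.sudo()/conn.run()/conn.put() call introduced above assumes a Fabric 2.x Connection object named conn. A minimal sketch of how such a connection is typically constructed follows; the host, user and key path are hypothetical placeholders, not values taken from this commit:

    from fabric import Connection  # Fabric 2.x API

    # Hypothetical connection details; the real ones come from the provisioning configuration.
    conn = Connection(host='10.0.0.10',
                      user='datalab-user',
                      connect_kwargs={'key_filename': '/home/datalab-user/keys/example_key.pem'})

    conn.sudo('apt-get -y update')                        # privileged command on the remote host
    conn.run('echo $HOME')                                # command as the login user
    conn.put('/root/templates/ssn.yml', '/tmp/ssn.yml')   # upload a local file to the remote host
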
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
index 7a8064e..4d0aa4b 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
@@ -36,16 +36,16 @@ def ensure_docker_daemon(datalab_path, os_user, region):
     try:
         if not exists(datalab_path + 'tmp/docker_daemon_ensured'):
             docker_version = os.environ['ssn_docker_version']
-            sudo('curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -')
-            sudo('add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) \
+            conn.sudo('curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -')
+            conn.sudo('add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) \
                   stable"')
             manage_pkg('update', 'remote', '')
-            sudo('apt-cache policy docker-ce')
+            conn.sudo('apt-cache policy docker-ce')
             manage_pkg('-y install', 'remote', 'docker-ce=5:{}~3-0~ubuntu-focal'.format(docker_version))
-            sudo('usermod -a -G docker ' + os_user)
-            sudo('update-rc.d docker defaults')
-            sudo('update-rc.d docker enable')
-            sudo('touch ' + datalab_path + 'tmp/docker_daemon_ensured')
+            conn.sudo('usermod -a -G docker ' + os_user)
+            conn.sudo('update-rc.d docker defaults')
+            conn.sudo('update-rc.d docker enable')
+            conn.sudo('touch ' + datalab_path + 'tmp/docker_daemon_ensured')
         return True
     except:
         return False
@@ -55,10 +55,10 @@ def ensure_nginx(datalab_path):
     try:
         if not exists(datalab_path + 'tmp/nginx_ensured'):
             manage_pkg('-y install', 'remote', 'nginx')
-            sudo('service nginx restart')
-            sudo('update-rc.d nginx defaults')
-            sudo('update-rc.d nginx enable')
-            sudo('touch ' + datalab_path + 'tmp/nginx_ensured')
+            conn.sudo('service nginx restart')
+            conn.sudo('update-rc.d nginx defaults')
+            conn.sudo('update-rc.d nginx enable')
+            conn.sudo('touch ' + datalab_path + 'tmp/nginx_ensured')
     except Exception as err:
         traceback.print_exc()
         print('Failed to ensure Nginx: ', str(err))
@@ -68,11 +68,11 @@ def ensure_nginx(datalab_path):
 def ensure_jenkins(datalab_path):
     try:
         if not exists(datalab_path + 'tmp/jenkins_ensured'):
-            sudo('wget -q -O - https://pkg.jenkins.io/debian/jenkins-ci.org.key | apt-key add -')
-            sudo('echo deb http://pkg.jenkins.io/debian-stable binary/ > /etc/apt/sources.list.d/jenkins.list')
+            conn.sudo('wget -q -O - https://pkg.jenkins.io/debian/jenkins-ci.org.key | apt-key add -')
+            conn.sudo('echo deb http://pkg.jenkins.io/debian-stable binary/ > /etc/apt/sources.list.d/jenkins.list')
             manage_pkg('-y update', 'remote', '')
             manage_pkg('-y install', 'remote', 'jenkins')
-            sudo('touch ' + datalab_path + 'tmp/jenkins_ensured')
+            conn.sudo('touch ' + datalab_path + 'tmp/jenkins_ensured')
     except Exception as err:
         traceback.print_exc()
         print('Failed to ensure Jenkins: ', str(err))
@@ -82,21 +82,21 @@ def ensure_jenkins(datalab_path):
 def configure_jenkins(datalab_path, os_user, config, tag_resource_id):
     try:
         if not exists(datalab_path + 'tmp/jenkins_configured'):
-            sudo('echo \'JENKINS_ARGS="--prefix=/jenkins --httpPort=8070"\' >> /etc/default/jenkins')
-            sudo('rm -rf /var/lib/jenkins/*')
-            sudo('mkdir -p /var/lib/jenkins/jobs/')
-            sudo('chown -R ' + os_user + ':' + os_user + ' /var/lib/jenkins/')
-            put('/root/templates/jenkins_jobs/*', '/var/lib/jenkins/jobs/')
-            sudo(
+            conn.sudo('echo \'JENKINS_ARGS="--prefix=/jenkins --httpPort=8070"\' >> /etc/default/jenkins')
+            conn.sudo('rm -rf /var/lib/jenkins/*')
+            conn.sudo('mkdir -p /var/lib/jenkins/jobs/')
+            conn.sudo('chown -R ' + os_user + ':' + os_user + ' /var/lib/jenkins/')
+            conn.put('/root/templates/jenkins_jobs/*', '/var/lib/jenkins/jobs/')
+            conn.sudo(
                 "find /var/lib/jenkins/jobs/ -type f | xargs sed -i \'s/OS_USR/{}/g; s/SBN/{}/g; s/CTUN/{}/g; s/SGI/{}/g; s/VPC/{}/g; s/SNI/{}/g; s/AKEY/{}/g\'".format(
                     os_user, config['service_base_name'], tag_resource_id, config['security_group_id'],
                     config['vpc_id'], config['subnet_id'], config['admin_key']))
-            sudo('chown -R jenkins:jenkins /var/lib/jenkins')
-            sudo('/etc/init.d/jenkins stop; sleep 5')
-            sudo('systemctl enable jenkins')
-            sudo('systemctl start jenkins')
-            sudo('touch ' + datalab_path + '/tmp/jenkins_configured')
-            sudo('echo "jenkins ALL = NOPASSWD:ALL" >> /etc/sudoers')
+            conn.sudo('chown -R jenkins:jenkins /var/lib/jenkins')
+            conn.sudo('/etc/init.d/jenkins stop; sleep 5')
+            conn.sudo('systemctl enable jenkins')
+            conn.sudo('systemctl start jenkins')
+            conn.sudo('touch ' + datalab_path + '/tmp/jenkins_configured')
+            conn.sudo('echo "jenkins ALL = NOPASSWD:ALL" >> /etc/sudoers')
     except Exception as err:
         traceback.print_exc()
         print('Failed to configure Jenkins: ', str(err))
@@ -107,17 +107,17 @@ def configure_nginx(config, datalab_path, hostname):
     try:
         random_file_part = id_generator(size=20)
         if not exists("/etc/nginx/conf.d/nginx_proxy.conf"):
-            sudo('useradd -r nginx')
-            sudo('rm -f /etc/nginx/conf.d/*')
-            put(config['nginx_template_dir'] + 'ssn_nginx.conf', '/tmp/nginx.conf')
-            put(config['nginx_template_dir'] + 'nginx_proxy.conf', '/tmp/nginx_proxy.conf')
-            sudo("sed -i 's|SSN_HOSTNAME|" + hostname + "|' /tmp/nginx_proxy.conf")
-            sudo('mv /tmp/nginx.conf ' + datalab_path + 'tmp/')
-            sudo('mv /tmp/nginx_proxy.conf ' + datalab_path + 'tmp/')
-            sudo('\cp ' + datalab_path + 'tmp/nginx.conf /etc/nginx/')
-            sudo('\cp ' + datalab_path + 'tmp/nginx_proxy.conf /etc/nginx/conf.d/')
-            sudo('mkdir -p /etc/nginx/locations')
-            sudo('rm -f /etc/nginx/sites-enabled/default')
+            conn.sudo('useradd -r nginx')
+            conn.sudo('rm -f /etc/nginx/conf.d/*')
+            conn.put(config['nginx_template_dir'] + 'ssn_nginx.conf', '/tmp/nginx.conf')
+            conn.put(config['nginx_template_dir'] + 'nginx_proxy.conf', '/tmp/nginx_proxy.conf')
+            conn.sudo("sed -i 's|SSN_HOSTNAME|" + hostname + "|' /tmp/nginx_proxy.conf")
+            conn.sudo('mv /tmp/nginx.conf ' + datalab_path + 'tmp/')
+            conn.sudo('mv /tmp/nginx_proxy.conf ' + datalab_path + 'tmp/')
+            conn.sudo('\cp ' + datalab_path + 'tmp/nginx.conf /etc/nginx/')
+            conn.sudo('\cp ' + datalab_path + 'tmp/nginx_proxy.conf /etc/nginx/conf.d/')
+            conn.sudo('mkdir -p /etc/nginx/locations')
+            conn.sudo('rm -f /etc/nginx/sites-enabled/default')
     except Exception as err:
         traceback.print_exc()
         print('Failed to configure Nginx: ', str(err))
@@ -131,18 +131,18 @@ def configure_nginx(config, datalab_path, hostname):
                 with open(template_file) as tpl:
                     for line in tpl:
                         out.write(line)
-            put("/tmp/%s-tmpproxy_location_jenkins_template.conf" % random_file_part,
+            conn.put("/tmp/%s-tmpproxy_location_jenkins_template.conf" % random_file_part,
                 '/tmp/proxy_location_jenkins.conf')
-            sudo('mv /tmp/proxy_location_jenkins.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
-            sudo('\cp ' + os.environ['ssn_datalab_path'] + 'tmp/proxy_location_jenkins.conf /etc/nginx/locations/')
-            sudo("echo 'engineer:" + crypt.crypt(nginx_password, id_generator()) + "' > /etc/nginx/htpasswd")
+            conn.sudo('mv /tmp/proxy_location_jenkins.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
+            conn.sudo('\cp ' + os.environ['ssn_datalab_path'] + 'tmp/proxy_location_jenkins.conf /etc/nginx/locations/')
+            conn.sudo("echo 'engineer:" + crypt.crypt(nginx_password, id_generator()) + "' > /etc/nginx/htpasswd")
             with open('jenkins_creds.txt', 'w+') as f:
                 f.write("Jenkins credentials: engineer  / " + nginx_password)
     except:
         return False
 
     try:
-        sudo('service nginx reload')
+        conn.sudo('service nginx reload')
         return True
     except:
         return False
@@ -152,9 +152,9 @@ def ensure_supervisor():
     try:
         if not exists(os.environ['ssn_datalab_path'] + 'tmp/superv_ensured'):
             manage_pkg('-y install', 'remote', 'supervisor')
-            sudo('update-rc.d supervisor defaults')
-            sudo('update-rc.d supervisor enable')
-            sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/superv_ensured')
+            conn.sudo('update-rc.d supervisor defaults')
+            conn.sudo('update-rc.d supervisor enable')
+            conn.sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/superv_ensured')
     except Exception as err:
         traceback.print_exc()
         print('Failed to install Supervisor: ', str(err))
@@ -164,13 +164,13 @@ def ensure_supervisor():
 def ensure_mongo():
     try:
         if not exists(os.environ['ssn_datalab_path'] + 'tmp/mongo_ensured'):
-            sudo('wget -qO - https://www.mongodb.org/static/pgp/server-4.4.asc | apt-key add -')
-            sudo('ver=`lsb_release -cs`; echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu '
+            conn.sudo('wget -qO - https://www.mongodb.org/static/pgp/server-4.4.asc | apt-key add -')
+            conn.sudo('ver=`lsb_release -cs`; echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu '
                  '$ver/mongodb-org/4.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.4.list; '
                  'apt update')
             manage_pkg('-y install', 'remote', 'mongodb-org')
-            sudo('systemctl enable mongod.service')
-            sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/mongo_ensured')
+            conn.sudo('systemctl enable mongod.service')
+            conn.sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/mongo_ensured')
     except Exception as err:
         traceback.print_exc()
         print('Failed to install MongoDB: ', str(err))
@@ -189,18 +189,18 @@ def start_ss(keyfile, host_string, datalab_conf_dir, web_path,
              keycloak_client_secret, keycloak_auth_server_url, report_path=''):
     try:
         if not exists(os.environ['ssn_datalab_path'] + 'tmp/ss_started'):
-            java_path = sudo("update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'")
+            java_path = conn.sudo("update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'")
             supervisor_conf = '/etc/supervisor/conf.d/supervisor_svc.conf'
             local('sed -i "s|MONGO_PASSWORD|{}|g" /root/templates/ssn.yml'.format(mongo_passwd))
             local('sed -i "s|KEYSTORE_PASSWORD|{}|g" /root/templates/ssn.yml'.format(keystore_passwd))
             local('sed -i "s|CLOUD_PROVIDER|{}|g" /root/templates/ssn.yml'.format(cloud_provider))
             local('sed -i "s|\${JRE_HOME}|' + java_path + '|g" /root/templates/ssn.yml')
-            sudo('sed -i "s|KEYNAME|{}|g" {}/webapp/provisioning-service/conf/provisioning.yml'.
+            conn.sudo('sed -i "s|KEYNAME|{}|g" {}/webapp/provisioning-service/conf/provisioning.yml'.
                  format(os.environ['conf_key_name'], datalab_path))
-            put('/root/templates/ssn.yml', '/tmp/ssn.yml')
-            sudo('mv /tmp/ssn.yml ' + os.environ['ssn_datalab_path'] + 'conf/')
-            put('/root/templates/proxy_location_webapp_template.conf', '/tmp/proxy_location_webapp_template.conf')
-            sudo('mv /tmp/proxy_location_webapp_template.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
+            conn.put('/root/templates/ssn.yml', '/tmp/ssn.yml')
+            conn.sudo('mv /tmp/ssn.yml ' + os.environ['ssn_datalab_path'] + 'conf/')
+            conn.put('/root/templates/proxy_location_webapp_template.conf', '/tmp/proxy_location_webapp_template.conf')
+            conn.sudo('mv /tmp/proxy_location_webapp_template.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
             if cloud_provider == 'aws':
                 conf_parameter_name = '--spring.config.location={0}billing_app.yml --conf '.format(datalab_conf_dir)
                 with open('/root/templates/supervisor_svc.conf', 'r') as f:
@@ -217,43 +217,43 @@ def start_ss(keyfile, host_string, datalab_conf_dir, web_path,
                     .replace('CONF_PARAMETER_NAME', conf_parameter_name)
                 with open('/root/templates/supervisor_svc.conf', 'w') as f:
                     f.write(text)
-            put('/root/templates/supervisor_svc.conf', '/tmp/supervisor_svc.conf')
-            sudo('mv /tmp/supervisor_svc.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
-            sudo('cp ' + os.environ['ssn_datalab_path'] +
+            conn.put('/root/templates/supervisor_svc.conf', '/tmp/supervisor_svc.conf')
+            conn.sudo('mv /tmp/supervisor_svc.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
+            conn.sudo('cp ' + os.environ['ssn_datalab_path'] +
                  'tmp/proxy_location_webapp_template.conf /etc/nginx/locations/proxy_location_webapp.conf')
-            sudo('cp ' + os.environ['ssn_datalab_path'] + 'tmp/supervisor_svc.conf {}'.format(supervisor_conf))
-            sudo('sed -i \'s=WEB_APP_DIR={}=\' {}'.format(web_path, supervisor_conf))
+            conn.sudo('cp ' + os.environ['ssn_datalab_path'] + 'tmp/supervisor_svc.conf {}'.format(supervisor_conf))
+            conn.sudo('sed -i \'s=WEB_APP_DIR={}=\' {}'.format(web_path, supervisor_conf))
             try:
-                sudo('mkdir -p /var/log/application')
-                run('mkdir -p /tmp/yml_tmp/')
+                conn.sudo('mkdir -p /var/log/application')
+                conn.run('mkdir -p /tmp/yml_tmp/')
                 for service in ['self-service', 'provisioning-service', 'billing']:
-                    jar = sudo('cd {0}{1}/lib/; find {1}*.jar -type f'.format(web_path, service))
-                    sudo('ln -s {0}{2}/lib/{1} {0}{2}/{2}.jar '.format(web_path, jar, service))
-                    sudo('cp {0}/webapp/{1}/conf/*.yml /tmp/yml_tmp/'.format(datalab_path, service))
+                    jar = conn.sudo('cd {0}{1}/lib/; find {1}*.jar -type f'.format(web_path, service))
+                    conn.sudo('ln -s {0}{2}/lib/{1} {0}{2}/{2}.jar '.format(web_path, jar, service))
+                    conn.sudo('cp {0}/webapp/{1}/conf/*.yml /tmp/yml_tmp/'.format(datalab_path, service))
                 # Replacing Keycloak and cloud parameters
                 for item in json.loads(cloud_params):
                     if "KEYCLOAK_" in item['key']:
-                        sudo('sed -i "s|{0}|{1}|g" /tmp/yml_tmp/self-service.yml'.format(
+                        conn.sudo('sed -i "s|{0}|{1}|g" /tmp/yml_tmp/self-service.yml'.format(
                             item['key'], item['value']))
-                    sudo('sed -i "s|{0}|{1}|g" /tmp/yml_tmp/provisioning.yml'.format(
+                    conn.sudo('sed -i "s|{0}|{1}|g" /tmp/yml_tmp/provisioning.yml'.format(
                         item['key'], item['value']))
-                sudo('sed -i "s|SERVICE_BASE_NAME|{0}|g" /tmp/yml_tmp/self-service.yml'.format(service_base_name))
-                sudo('sed -i "s|OPERATION_SYSTEM|debian|g" /tmp/yml_tmp/self-service.yml')
-                sudo('sed -i "s|<SSN_INSTANCE_SIZE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
+                conn.sudo('sed -i "s|SERVICE_BASE_NAME|{0}|g" /tmp/yml_tmp/self-service.yml'.format(service_base_name))
+                conn.sudo('sed -i "s|OPERATION_SYSTEM|debian|g" /tmp/yml_tmp/self-service.yml')
+                conn.sudo('sed -i "s|<SSN_INSTANCE_SIZE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
                     os.environ['{0}_ssn_instance_size'.format(os.environ['conf_cloud_provider'])]))
                 if cloud_provider == 'azure':
-                    sudo('sed -i "s|<LOGIN_USE_LDAP>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(ldap_login))
-                    sudo('sed -i "s|<LOGIN_TENANT_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(tenant_id))
-                    sudo('sed -i "s|<LOGIN_APPLICATION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(application_id))
-                    sudo('sed -i "s|<DATALAB_SUBSCRIPTION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
+                    conn.sudo('sed -i "s|<LOGIN_USE_LDAP>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(ldap_login))
+                    conn.sudo('sed -i "s|<LOGIN_TENANT_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(tenant_id))
+                    conn.sudo('sed -i "s|<LOGIN_APPLICATION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(application_id))
+                    conn.sudo('sed -i "s|<DATALAB_SUBSCRIPTION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
                         subscription_id))
-                    sudo('sed -i "s|<MANAGEMENT_API_AUTH_FILE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
+                    conn.sudo('sed -i "s|<MANAGEMENT_API_AUTH_FILE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
                         authentication_file))
-                    sudo('sed -i "s|<VALIDATE_PERMISSION_SCOPE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
+                    conn.sudo('sed -i "s|<VALIDATE_PERMISSION_SCOPE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
                         validate_permission_scope))
-                    sudo('sed -i "s|<LOGIN_APPLICATION_REDIRECT_URL>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
+                    conn.sudo('sed -i "s|<LOGIN_APPLICATION_REDIRECT_URL>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
                         hostname))
-                    sudo('sed -i "s|<LOGIN_PAGE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(hostname))
+                    conn.sudo('sed -i "s|<LOGIN_PAGE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(hostname))
                     # if os.environ['azure_datalake_enable'] == 'true':
                     #     permission_scope = 'subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataLakeStore/accounts/{}/providers/Microsoft.Authorization/'.format(
                     #         subscription_id, service_base_name, data_lake_name)
@@ -261,8 +261,8 @@ def start_ss(keyfile, host_string, datalab_conf_dir, web_path,
                     #     permission_scope = 'subscriptions/{}/resourceGroups/{}/providers/Microsoft.Authorization/'.format(
                     #         subscription_id, service_base_name
                     #     )
-                sudo('mv /tmp/yml_tmp/* ' + datalab_conf_dir)
-                sudo('rmdir /tmp/yml_tmp/')
+                conn.sudo('mv /tmp/yml_tmp/* ' + datalab_conf_dir)
+                conn.sudo('rmdir /tmp/yml_tmp/')
             except:
                 append_result("Unable to upload webapp jars")
                 sys.exit(1)
@@ -331,37 +331,37 @@ def start_ss(keyfile, host_string, datalab_conf_dir, web_path,
                                    keycloak_client_id,
                                    keycloak_client_secret,
                                    keycloak_auth_server_url)
-                sudo('python3 /tmp/configure_billing.py {}'.format(params))
+                conn.sudo('python3 /tmp/configure_billing.py {}'.format(params))
             try:
                 if os.environ['conf_stepcerts_enabled'] == 'true':
-                    sudo(
+                    conn.sudo(
                         'openssl pkcs12 -export -in /etc/ssl/certs/datalab.crt -inkey /etc/ssl/certs/datalab.key -name ssn '
                         '-out ssn.p12 -password pass:{0}'.format(keystore_passwd))
-                    sudo('keytool -importkeystore -srckeystore ssn.p12 -srcstoretype PKCS12 -alias ssn -destkeystore '
+                    conn.sudo('keytool -importkeystore -srckeystore ssn.p12 -srcstoretype PKCS12 -alias ssn -destkeystore '
                          '/home/{0}/keys/ssn.keystore.jks -deststorepass "{1}" -srcstorepass "{1}"'.format(
                         os_user, keystore_passwd))
-                    sudo('keytool -keystore /home/{0}/keys/ssn.keystore.jks -alias step-ca -import -file '
+                    conn.sudo('keytool -keystore /home/{0}/keys/ssn.keystore.jks -alias step-ca -import -file '
                          '/etc/ssl/certs/root_ca.crt  -deststorepass "{1}" -srcstorepass "{1}" -noprompt'.format(
                           os_user, keystore_passwd))
-                    sudo('keytool -importcert -trustcacerts -alias step-ca -file /etc/ssl/certs/root_ca.crt '
+                    conn.sudo('keytool -importcert -trustcacerts -alias step-ca -file /etc/ssl/certs/root_ca.crt '
                          '-noprompt -storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_path))
-                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt '
+                    conn.sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt '
                          '-storepass changeit -keystore {0}/lib/security/cacerts'.format(java_path))
                 else:
-                    sudo('keytool -genkeypair -alias ssn -keyalg RSA -validity 730 -storepass {1} -keypass {1} \
+                    conn.sudo('keytool -genkeypair -alias ssn -keyalg RSA -validity 730 -storepass {1} -keypass {1} \
                          -keystore /home/{0}/keys/ssn.keystore.jks -keysize 2048 -dname "CN=localhost"'.format(
                         os_user, keystore_passwd))
-                    sudo('keytool -exportcert -alias ssn -storepass {1} -file /etc/ssl/certs/datalab.crt \
+                    conn.sudo('keytool -exportcert -alias ssn -storepass {1} -file /etc/ssl/certs/datalab.crt \
                          -keystore /home/{0}/keys/ssn.keystore.jks'.format(os_user, keystore_passwd))
-                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt \
+                    conn.sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt \
                          -storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_path))
             except:
                 append_result("Unable to generate cert and copy to java keystore")
                 sys.exit(1)
-            sudo('service supervisor start')
-            sudo('service nginx restart')
-            sudo('service supervisor restart')
-            sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/ss_started')
+            conn.sudo('service supervisor start')
+            conn.sudo('service nginx restart')
+            conn.sudo('service supervisor restart')
+            conn.sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/ss_started')
     except Exception as err:
         traceback.print_exc()
         print('Failed to start Self-service: ', str(err))
@@ -374,15 +374,15 @@ def install_build_dep():
             maven_version = '3.5.4'
             manage_pkg('-y install', 'remote', 'openjdk-8-jdk git wget unzip')
             with cd('/opt/'):
-                sudo(
+                conn.sudo(
                     'wget http://mirrors.sonic.net/apache/maven/maven-{0}/{1}/binaries/apache-maven-{1}-bin.zip'.format(
                         maven_version.split('.')[0], maven_version))
-                sudo('unzip apache-maven-{}-bin.zip'.format(maven_version))
-                sudo('mv apache-maven-{} maven'.format(maven_version))
-            sudo('bash -c "curl --silent --location https://deb.nodesource.com/setup_12.x | bash -"')
+                conn.sudo('unzip apache-maven-{}-bin.zip'.format(maven_version))
+                conn.sudo('mv apache-maven-{} maven'.format(maven_version))
+            conn.sudo('bash -c "curl --silent --location https://deb.nodesource.com/setup_12.x | bash -"')
             manage_pkg('-y install', 'remote', 'nodejs')
-            sudo('npm config set unsafe-perm=true')
-            sudo('touch {}tmp/build_dep_ensured'.format(os.environ['ssn_datalab_path']))
+            conn.sudo('npm config set unsafe-perm=true')
+            conn.sudo('touch {}tmp/build_dep_ensured'.format(os.environ['ssn_datalab_path']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to install build dependencies for UI: ', str(err))
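
For reference, Fabric 2's run() and sudo() return a Result object rather than the plain string that Fabric 1 returned, so command output is usually read from the Result's .stdout attribute. A minimal sketch, with a placeholder log file name:

    # Sketch only: hide=True suppresses echoing of the remote output to the console.
    result = conn.sudo('cat /tmp/os_install_example.log', hide=True)
    err = result.stdout.replace('"', "'").strip()   # the captured command output
    ok = result.ok                                  # True when the command exited with code 0
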
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index c699b17..951d015 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -40,12 +40,12 @@ from fabric.contrib.files import exists
 def ensure_pip(requisites):
     try:
         if not exists('/home/{}/.ensure_dir/pip_path_added'.format(os.environ['conf_os_user'])):
-            sudo('echo PATH=$PATH:/usr/local/bin/:/opt/spark/bin/ >> /etc/profile')
-            sudo('echo export PATH >> /etc/profile')
-            sudo('pip3 install -UI pip=={} --no-cache-dir'.format(os.environ['conf_pip_version']))
-            sudo('pip3 install --upgrade setuptools')
-            sudo('pip3 install -UI {} --no-cache-dir'.format(requisites))
-            sudo('touch /home/{}/.ensure_dir/pip_path_added'.format(os.environ['conf_os_user']))
+            conn.sudo('echo PATH=$PATH:/usr/local/bin/:/opt/spark/bin/ >> /etc/profile')
+            conn.sudo('echo export PATH >> /etc/profile')
+            conn.sudo('pip3 install -UI pip=={} --no-cache-dir'.format(os.environ['conf_pip_version']))
+            conn.sudo('pip3 install --upgrade setuptools')
+            conn.sudo('pip3 install -UI {} --no-cache-dir'.format(requisites))
+            conn.sudo('touch /home/{}/.ensure_dir/pip_path_added'.format(os.environ['conf_os_user']))
     except:
         sys.exit(1)
 
@@ -61,10 +61,10 @@ def install_pip_pkg(requisites, pip_version, lib_group):
         if pip_version == 'pip3' and not exists('/bin/pip3'):
             for v in range(4, 8):
                 if exists('/bin/pip3.{}'.format(v)):
-                    sudo('ln -s /bin/pip3.{} /bin/pip3'.format(v))
-        sudo('{} install -U pip=={} setuptools'.format(pip_version, os.environ['conf_pip_version']))
-        sudo('{} install -U pip=={} --no-cache-dir'.format(pip_version, os.environ['conf_pip_version']))
-        sudo('{} install --upgrade pip=={}'.format(pip_version, os.environ['conf_pip_version']))
+                    conn.sudo('ln -s /bin/pip3.{} /bin/pip3'.format(v))
+        conn.sudo('{} install -U pip=={} setuptools'.format(pip_version, os.environ['conf_pip_version']))
+        conn.sudo('{} install -U pip=={} --no-cache-dir'.format(pip_version, os.environ['conf_pip_version']))
+        conn.sudo('{} install --upgrade pip=={}'.format(pip_version, os.environ['conf_pip_version']))
         for pip_pkg in requisites:
             name, vers = pip_pkg
             if pip_pkg[1] == '' or pip_pkg[1] == 'N/A':
@@ -73,20 +73,20 @@ def install_pip_pkg(requisites, pip_version, lib_group):
             else:
                 version = pip_pkg[1]
                 pip_pkg = "{}=={}".format(pip_pkg[0], pip_pkg[1])
-            sudo('{0} install -U {1} --no-cache-dir 2>&1 | tee /tmp/tee.tmp; if ! grep -w -i -E  "({2})" /tmp/tee.tmp > '
+            conn.sudo('{0} install -U {1} --no-cache-dir 2>&1 | tee /tmp/tee.tmp; if ! grep -w -i -E  "({2})" /tmp/tee.tmp > '
                  ' /tmp/{0}install_{3}.log; then  echo "" > /tmp/{0}install_{3}.log;fi'.format(pip_version, pip_pkg, error_parser, name))
-            err = sudo('cat /tmp/{0}install_{1}.log'.format(pip_version, pip_pkg.split("==")[0])).replace('"', "'")
-            sudo('{0} freeze --all | if ! grep -w -i {1} > /tmp/{0}install_{1}.list; then  echo "" > /tmp/{0}install_{1}.list;fi'.format(pip_version, name))
-            res = sudo('cat /tmp/{0}install_{1}.list'.format(pip_version, name))
-            sudo('cat /tmp/tee.tmp | if ! grep "Successfully installed" > /tmp/{0}install_{1}.list; then  echo "" > /tmp/{0}install_{1}.list;fi'.format(pip_version, name))
-            installed_out = sudo('cat /tmp/{0}install_{1}.list'.format(pip_version, name))
+            err = conn.sudo('cat /tmp/{0}install_{1}.log'.format(pip_version, pip_pkg.split("==")[0])).replace('"', "'")
+            conn.sudo('{0} freeze --all | if ! grep -w -i {1} > /tmp/{0}install_{1}.list; then  echo "" > /tmp/{0}install_{1}.list;fi'.format(pip_version, name))
+            res = conn.sudo('cat /tmp/{0}install_{1}.list'.format(pip_version, name))
+            conn.sudo('cat /tmp/tee.tmp | if ! grep "Successfully installed" > /tmp/{0}install_{1}.list; then  echo "" > /tmp/{0}install_{1}.list;fi'.format(pip_version, name))
+            installed_out = conn.sudo('cat /tmp/{0}install_{1}.list'.format(pip_version, name))
             changed_pip_pkg = False
             if res == '':
                 changed_pip_pkg = pip_pkg.split("==")[0].replace("_", "-").split('-')
                 changed_pip_pkg = changed_pip_pkg[0]
-                sudo('{0} freeze --all | if ! grep -w -i {1} > /tmp/{0}install_{1}.list; then  echo "" > '
+                conn.sudo('{0} freeze --all | if ! grep -w -i {1} > /tmp/{0}install_{1}.list; then  echo "" > '
                      '/tmp/{0}install_{1}.list;fi'.format(pip_version, changed_pip_pkg))
-                res = sudo('cat /tmp/{0}install_{1}.list'.format(pip_version, changed_pip_pkg))
+                res = conn.sudo('cat /tmp/{0}install_{1}.list'.format(pip_version, changed_pip_pkg))
             if err and name not in installed_out:
                 status_msg = 'installation_error'
                 if 'ERROR: No matching distribution found for {}'.format(name) in err:
@@ -110,9 +110,9 @@ def install_pip_pkg(requisites, pip_version, lib_group):
                 else:
                     versions = []
 
-            sudo('if ! grep -w -i -E  "Installing collected packages:" /tmp/tee.tmp > /tmp/{0}install_{1}.log; '
+            conn.sudo('if ! grep -w -i -E  "Installing collected packages:" /tmp/tee.tmp > /tmp/{0}install_{1}.log; '
                  'then  echo "" > /tmp/{0}install_{1}.log;fi'.format(pip_version, name))
-            dep = sudo('cat /tmp/{0}install_{1}.log'.format(pip_version, name)).replace('\r\n', '').strip()[31:]
+            dep = conn.sudo('cat /tmp/{0}install_{1}.log'.format(pip_version, name)).replace('\r\n', '').strip()[31:]
             if dep == '':
                 dep = []
             else:
@@ -121,9 +121,9 @@ def install_pip_pkg(requisites, pip_version, lib_group):
                     if i == name:
                         dep[n] = ''
                     else:
-                        sudo('{0} show {1} 2>&1 | if ! grep Version: /tmp/tee.tmp > '
+                        conn.sudo('{0} show {1} 2>&1 | if ! grep Version: /tmp/tee.tmp > '
                              '/tmp/{0}_install_{1}.log; then echo "" > /tmp/{0}_install_{1}.log;fi'.format(pip_version, i))
-                        dep[n] = sudo('cat /tmp/{0}_install_{1}.log'.format(pip_version, i)).replace('Version: ', '{} v.'.format(i))
+                        dep[n] = conn.sudo('cat /tmp/{0}_install_{1}.log'.format(pip_version, i)).replace('Version: ', '{} v.'.format(i))
                 dep = [i for i in dep if i]
             status.append({"group": lib_group, "name": name, "version": version, "status": status_msg,
                            "error_message": err, "available_versions": versions, "add_pkgs": dep})
@@ -184,63 +184,63 @@ def append_result(error, exception=''):
 def put_resource_status(resource, status, datalab_path, os_user, hostname):
     keyfile = os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem"
     init_datalab_connection(hostname, os_user, keyfile)
-    sudo('python3 ' + datalab_path + 'tmp/resource_status.py --resource {} --status {}'.format(resource, status))
+    conn.sudo('python3 ' + datalab_path + 'tmp/resource_status.py --resource {} --status {}'.format(resource, status))
     close_connection()
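
    # Minimal sketch (an assumption for illustration, not part of this commit) of the
    # Fabric 2 Connection that the refactored conn.sudo()/conn.run()/conn.put() calls
    # rely on; host, user and key path below are hypothetical placeholders, and the
    # helper name mirrors the init_datalab_connection()/close_connection() pair above.
    from fabric import Connection

    def open_connection(hostname, os_user, keyfile):
        # authenticate as os_user with the given private key; sudo() elevates per command
        return Connection(host=hostname, user=os_user,
                          connect_kwargs={'key_filename': keyfile})

    # conn = open_connection('10.0.0.10', 'datalab-user', '/root/keys/KEY.pem')
    # conn.sudo('touch /tmp/example')      # run a command via sudo
    # conn.run('echo $HOME')               # run a command as os_user
    # conn.put('/root/file.txt', '/tmp/')  # upload a local file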
 
 
 def configure_jupyter(os_user, jupyter_conf_file, templates_dir, jupyter_version, exploratory_name):
     if not exists('/home/' + os_user + '/.ensure_dir/jupyter_ensured'):
         try:
-            sudo('pip3 install notebook=={} --no-cache-dir'.format(jupyter_version))
-            sudo('pip3 install jupyter --no-cache-dir')
-            sudo('rm -rf {}'.format(jupyter_conf_file))
-            run('jupyter notebook --generate-config --config {}'.format(jupyter_conf_file))
+            conn.sudo('pip3 install notebook=={} --no-cache-dir'.format(jupyter_version))
+            conn.sudo('pip3 install jupyter --no-cache-dir')
+            conn.sudo('rm -rf {}'.format(jupyter_conf_file))
+            conn.run('jupyter notebook --generate-config --config {}'.format(jupyter_conf_file))
             with cd('/home/{}'.format(os_user)):
-                run('mkdir -p ~/.jupyter/custom/')
-                run('echo "#notebook-container { width: auto; }" > ~/.jupyter/custom/custom.css')
-            sudo('echo "c.NotebookApp.ip = \'0.0.0.0\'" >> {}'.format(jupyter_conf_file))
-            sudo('echo "c.NotebookApp.base_url = \'/{0}/\'" >> {1}'.format(exploratory_name, jupyter_conf_file))
-            sudo('echo c.NotebookApp.open_browser = False >> {}'.format(jupyter_conf_file))
-            sudo('echo \'c.NotebookApp.cookie_secret = b"{0}"\' >> {1}'.format(id_generator(), jupyter_conf_file))
-            sudo('''echo "c.NotebookApp.token = u''" >> {}'''.format(jupyter_conf_file))
-            sudo('echo \'c.KernelSpecManager.ensure_native_kernel = False\' >> {}'.format(jupyter_conf_file))
-            put(templates_dir + 'jupyter-notebook.service', '/tmp/jupyter-notebook.service')
-            sudo("chmod 644 /tmp/jupyter-notebook.service")
+                conn.run('mkdir -p ~/.jupyter/custom/')
+                conn.run('echo "#notebook-container { width: auto; }" > ~/.jupyter/custom/custom.css')
+            conn.sudo('echo "c.NotebookApp.ip = \'0.0.0.0\'" >> {}'.format(jupyter_conf_file))
+            conn.sudo('echo "c.NotebookApp.base_url = \'/{0}/\'" >> {1}'.format(exploratory_name, jupyter_conf_file))
+            conn.sudo('echo c.NotebookApp.open_browser = False >> {}'.format(jupyter_conf_file))
+            conn.sudo('echo \'c.NotebookApp.cookie_secret = b"{0}"\' >> {1}'.format(id_generator(), jupyter_conf_file))
+            conn.sudo('''echo "c.NotebookApp.token = u''" >> {}'''.format(jupyter_conf_file))
+            conn.sudo('echo \'c.KernelSpecManager.ensure_native_kernel = False\' >> {}'.format(jupyter_conf_file))
+            conn.put(templates_dir + 'jupyter-notebook.service', '/tmp/jupyter-notebook.service')
+            conn.sudo("chmod 644 /tmp/jupyter-notebook.service")
             if os.environ['application'] == 'tensor':
-                sudo("sed -i '/ExecStart/s|-c \"|-c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; |g' /tmp/jupyter-notebook.service")
+                conn.sudo("sed -i '/ExecStart/s|-c \"|-c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; |g' /tmp/jupyter-notebook.service")
             elif os.environ['application'] == 'deeplearning':
-                sudo("sed -i '/ExecStart/s|-c \"|-c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:"
+                conn.sudo("sed -i '/ExecStart/s|-c \"|-c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:"
                      "/usr/local/cuda/lib64:/usr/lib64/openmpi/lib: ; export PYTHONPATH=/home/" + os_user +
                      "/caffe/python:/home/" + os_user + "/pytorch/build:$PYTHONPATH ; |g' /tmp/jupyter-notebook.service")
-            sudo("sed -i 's|CONF_PATH|{}|' /tmp/jupyter-notebook.service".format(jupyter_conf_file))
-            sudo("sed -i 's|OS_USR|{}|' /tmp/jupyter-notebook.service".format(os_user))
-            http_proxy = run('echo $http_proxy')
-            https_proxy = run('echo $https_proxy')
+            conn.sudo("sed -i 's|CONF_PATH|{}|' /tmp/jupyter-notebook.service".format(jupyter_conf_file))
+            conn.sudo("sed -i 's|OS_USR|{}|' /tmp/jupyter-notebook.service".format(os_user))
+            http_proxy = conn.run('echo $http_proxy')
+            https_proxy = conn.run('echo $https_proxy')
             #sudo('sed -i \'/\[Service\]/ a\Environment=\"HTTP_PROXY={}\"\'  /tmp/jupyter-notebook.service'.format(
             #    http_proxy))
             #sudo('sed -i \'/\[Service\]/ a\Environment=\"HTTPS_PROXY={}\"\'  /tmp/jupyter-notebook.service'.format(
             #    https_proxy))
-            java_home = run("update-alternatives --query java | grep -o --color=never \'/.*/java-8.*/jre\'").splitlines()[0]
-            sudo('sed -i \'/\[Service\]/ a\Environment=\"JAVA_HOME={}\"\'  /tmp/jupyter-notebook.service'.format(
+            java_home = conn.run("update-alternatives --query java | grep -o --color=never \'/.*/java-8.*/jre\'").splitlines()[0]
+            conn.sudo('sed -i \'/\[Service\]/ a\Environment=\"JAVA_HOME={}\"\'  /tmp/jupyter-notebook.service'.format(
                 java_home))
-            sudo('\cp /tmp/jupyter-notebook.service /etc/systemd/system/jupyter-notebook.service')
-            sudo('chown -R {0}:{0} /home/{0}/.local'.format(os_user))
-            sudo('mkdir -p /mnt/var')
-            sudo('chown {0}:{0} /mnt/var'.format(os_user))
+            conn.sudo('\cp /tmp/jupyter-notebook.service /etc/systemd/system/jupyter-notebook.service')
+            conn.sudo('chown -R {0}:{0} /home/{0}/.local'.format(os_user))
+            conn.sudo('mkdir -p /mnt/var')
+            conn.sudo('chown {0}:{0} /mnt/var'.format(os_user))
             if os.environ['application'] == 'jupyter':
-                sudo('jupyter-kernelspec remove -f python2 || echo "Such kernel doesnt exists"')
-                sudo('jupyter-kernelspec remove -f python3 || echo "Such kernel doesnt exists"')
-            sudo("systemctl daemon-reload")
-            sudo("systemctl enable jupyter-notebook")
-            sudo("systemctl start jupyter-notebook")
-            sudo('touch /home/{}/.ensure_dir/jupyter_ensured'.format(os_user))
+                conn.sudo('jupyter-kernelspec remove -f python2 || echo "Such kernel doesnt exists"')
+                conn.sudo('jupyter-kernelspec remove -f python3 || echo "Such kernel doesnt exists"')
+            conn.sudo("systemctl daemon-reload")
+            conn.sudo("systemctl enable jupyter-notebook")
+            conn.sudo("systemctl start jupyter-notebook")
+            conn.sudo('touch /home/{}/.ensure_dir/jupyter_ensured'.format(os_user))
         except:
             sys.exit(1)
     else:
         try:
-            sudo(
+            conn.sudo(
                 'sed -i "s/c.NotebookApp.base_url =.*/c.NotebookApp.base_url = \'\/{0}\/\'/" {1}'.format(exploratory_name, jupyter_conf_file))
-            sudo("systemctl restart jupyter-notebook")
+            conn.sudo("systemctl restart jupyter-notebook")
         except Exception as err:
             print('Error:', str(err))
             sys.exit(1)
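
    # Illustrative Fabric 2 detail (hypothetical host values, not part of this commit):
    # run() and sudo() return a Result object, so captured output is read from .stdout
    # and the exit status from .ok / .failed rather than treating the return value as a
    # plain string, as in the proxy and JAVA_HOME lookups in the hunk above.
    from fabric import Connection

    conn = Connection(host='10.0.0.10', user='datalab-user',
                      connect_kwargs={'key_filename': '/root/keys/KEY.pem'})
    result = conn.run('echo $http_proxy', hide=True)
    http_proxy = result.stdout.strip()   # the captured command output
    if result.ok:                        # True when the command exited with code 0
        print('proxy:', http_proxy or '<not set>')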
@@ -249,13 +249,13 @@ def configure_docker(os_user):
     try:
         if not exists('/home/' + os_user + '/.ensure_dir/docker_ensured'):
             docker_version = os.environ['ssn_docker_version']
-            sudo('curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -')
-            sudo('add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) \
+            conn.sudo('curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -')
+            conn.sudo('add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) \
                   stable"')
             manage_pkg('update', 'remote', '')
-            sudo('apt-cache policy docker-ce')
+            conn.sudo('apt-cache policy docker-ce')
             manage_pkg('-y install', 'remote', 'docker-ce=5:{}~3-0~ubuntu-focal'.format(docker_version))
-            sudo('touch /home/{}/.ensure_dir/docker_ensured'.format(os_user))
+            conn.sudo('touch /home/{}/.ensure_dir/docker_ensured'.format(os_user))
     except Exception as err:
         print('Failed to configure Docker:', str(err))
         sys.exit(1)
@@ -263,76 +263,76 @@ def configure_docker(os_user):
 def ensure_jupyterlab_files(os_user, jupyterlab_dir, jupyterlab_image, jupyter_conf_file, jupyterlab_conf_file, exploratory_name, edge_ip):
     if not exists(jupyterlab_dir):
         try:
-            sudo('mkdir {}'.format(jupyterlab_dir))
-#            put(templates_dir + 'pyspark_local_template.json', '/tmp/pyspark_local_template.json')
-#            put(templates_dir + 'py3spark_local_template.json', '/tmp/py3spark_local_template.json')
-            put('/root/Dockerfile_jupyterlab', '/tmp/Dockerfile_jupyterlab')
-            put('/root/scripts/*', '/tmp/')
-#            sudo('\cp /tmp/pyspark_local_template.json ' + jupyterlab_dir + 'pyspark_local_template.json')
-#            sudo('\cp /tmp/py3spark_local_template.json ' + jupyterlab_dir + 'py3spark_local_template.json')
-#            sudo('sed -i \'s/3.5/3.6/g\' {}py3spark_local_template.json'.format(jupyterlab_dir))
-            sudo('mv /tmp/jupyterlab_run.sh {}jupyterlab_run.sh'.format(jupyterlab_dir))
-            sudo('mv /tmp/Dockerfile_jupyterlab {}Dockerfile_jupyterlab'.format(jupyterlab_dir))
-            sudo('mv /tmp/build.sh {}build.sh'.format(jupyterlab_dir))
-            sudo('mv /tmp/start.sh {}start.sh'.format(jupyterlab_dir))
-            sudo('sed -i \'s/nb_user/{}/g\' {}Dockerfile_jupyterlab'.format(os_user, jupyterlab_dir))
-            sudo('sed -i \'s/jupyterlab_image/{}/g\' {}Dockerfile_jupyterlab'.format(jupyterlab_image, jupyterlab_dir))
-            sudo('sed -i \'s/nb_user/{}/g\' {}start.sh'.format(os_user, jupyterlab_dir))
-#            sudo('sed -i \'s/jup_version/{}/g\' {}Dockerfile_jupyterlab'.format(jupyter_version, jupyterlab_dir))
-#            sudo('sed -i \'s/hadoop_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_hadoop_version'], jupyterlab_dir))
-#            sudo('sed -i \'s/tornado_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_tornado_version'], jupyterlab_dir))
-#            sudo('sed -i \'s/matplotlib_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_matplotlib_version'], jupyterlab_dir))
-#            sudo('sed -i \'s/numpy_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_numpy_version'], jupyterlab_dir))
-#            sudo('sed -i \'s/spark_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_spark_version'], jupyterlab_dir))
-#            sudo('sed -i \'s/scala_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_scala_version'], jupyterlab_dir))
-            sudo('sed -i \'s/CONF_PATH/{}/g\' {}jupyterlab_run.sh'.format(jupyterlab_conf_file, jupyterlab_dir))
-            sudo('touch {}'.format(jupyter_conf_file))
-            sudo('echo "c.NotebookApp.ip = \'0.0.0.0\'" >> {}'.format(jupyter_conf_file))
-            sudo('echo "c.NotebookApp.base_url = \'/{0}/\'" >> {1}'.format(exploratory_name, jupyter_conf_file))
-            sudo('echo c.NotebookApp.open_browser = False >> {}'.format(jupyter_conf_file))
-            sudo('echo \'c.NotebookApp.cookie_secret = b"{0}"\' >> {1}'.format(id_generator(), jupyter_conf_file))
-            sudo('''echo "c.NotebookApp.token = u''" >> {}'''.format(jupyter_conf_file))
-            sudo('echo \'c.KernelSpecManager.ensure_native_kernel = False\' >> {}'.format(jupyter_conf_file))
-            sudo('chown datalab-user:datalab-user /opt')
-            sudo(
+            conn.sudo('mkdir {}'.format(jupyterlab_dir))
+#            conn.put(templates_dir + 'pyspark_local_template.json', '/tmp/pyspark_local_template.json')
+#            conn.put(templates_dir + 'py3spark_local_template.json', '/tmp/py3spark_local_template.json')
+            conn.put('/root/Dockerfile_jupyterlab', '/tmp/Dockerfile_jupyterlab')
+            conn.put('/root/scripts/*', '/tmp/')
+#            conn.sudo('\cp /tmp/pyspark_local_template.json ' + jupyterlab_dir + 'pyspark_local_template.json')
+#            conn.sudo('\cp /tmp/py3spark_local_template.json ' + jupyterlab_dir + 'py3spark_local_template.json')
+#            conn.sudo('sed -i \'s/3.5/3.6/g\' {}py3spark_local_template.json'.format(jupyterlab_dir))
+            conn.sudo('mv /tmp/jupyterlab_run.sh {}jupyterlab_run.sh'.format(jupyterlab_dir))
+            conn.sudo('mv /tmp/Dockerfile_jupyterlab {}Dockerfile_jupyterlab'.format(jupyterlab_dir))
+            conn.sudo('mv /tmp/build.sh {}build.sh'.format(jupyterlab_dir))
+            conn.sudo('mv /tmp/start.sh {}start.sh'.format(jupyterlab_dir))
+            conn.sudo('sed -i \'s/nb_user/{}/g\' {}Dockerfile_jupyterlab'.format(os_user, jupyterlab_dir))
+            conn.sudo('sed -i \'s/jupyterlab_image/{}/g\' {}Dockerfile_jupyterlab'.format(jupyterlab_image, jupyterlab_dir))
+            conn.sudo('sed -i \'s/nb_user/{}/g\' {}start.sh'.format(os_user, jupyterlab_dir))
+#            conn.sudo('sed -i \'s/jup_version/{}/g\' {}Dockerfile_jupyterlab'.format(jupyter_version, jupyterlab_dir))
+#            conn.sudo('sed -i \'s/hadoop_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_hadoop_version'], jupyterlab_dir))
+#            conn.sudo('sed -i \'s/tornado_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_tornado_version'], jupyterlab_dir))
+#            conn.sudo('sed -i \'s/matplotlib_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_matplotlib_version'], jupyterlab_dir))
+#            conn.sudo('sed -i \'s/numpy_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_numpy_version'], jupyterlab_dir))
+#            conn.sudo('sed -i \'s/spark_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_spark_version'], jupyterlab_dir))
+#            conn.sudo('sed -i \'s/scala_version/{}/g\' {}Dockerfile_jupyterlab'.format(os.environ['notebook_scala_version'], jupyterlab_dir))
+            conn.sudo('sed -i \'s/CONF_PATH/{}/g\' {}jupyterlab_run.sh'.format(jupyterlab_conf_file, jupyterlab_dir))
+            conn.sudo('touch {}'.format(jupyter_conf_file))
+            conn.sudo('echo "c.NotebookApp.ip = \'0.0.0.0\'" >> {}'.format(jupyter_conf_file))
+            conn.sudo('echo "c.NotebookApp.base_url = \'/{0}/\'" >> {1}'.format(exploratory_name, jupyter_conf_file))
+            conn.sudo('echo c.NotebookApp.open_browser = False >> {}'.format(jupyter_conf_file))
+            conn.sudo('echo \'c.NotebookApp.cookie_secret = b"{0}"\' >> {1}'.format(id_generator(), jupyter_conf_file))
+            conn.sudo('''echo "c.NotebookApp.token = u''" >> {}'''.format(jupyter_conf_file))
+            conn.sudo('echo \'c.KernelSpecManager.ensure_native_kernel = False\' >> {}'.format(jupyter_conf_file))
+            conn.sudo('chown datalab-user:datalab-user /opt')
+            conn.sudo(
                 'echo -e "Host git.epam.com\n   HostName git.epam.com\n   ProxyCommand nc -X connect -x {}:3128 %h %p\n" > /home/{}/.ssh/config'.format(
                     edge_ip, os_user))
-            sudo('echo -e "Host github.com\n   HostName github.com\n   ProxyCommand nc -X connect -x {}:3128 %h %p" >> /home/{}/.ssh/config'.format(edge_ip, os_user))
-#            sudo('touch {}'.format(spark_script))
-#            sudo('echo "#!/bin/bash" >> {}'.format(spark_script))
-#            sudo(
+            conn.sudo('echo -e "Host github.com\n   HostName github.com\n   ProxyCommand nc -X connect -x {}:3128 %h %p" >> /home/{}/.ssh/config'.format(edge_ip, os_user))
+#            conn.sudo('touch {}'.format(spark_script))
+#            conn.sudo('echo "#!/bin/bash" >> {}'.format(spark_script))
+#            conn.sudo(
 #                'echo "PYJ=\`find /opt/spark/ -name \'*py4j*.zip\' | tr \'\\n\' \':\' | sed \'s|:$||g\'\`; sed -i \'s|PY4J|\'$PYJ\'|g\' /tmp/pyspark_local_template.json" >> {}'.format(
 #                spark_script))
-        #            sudo(
+        #            conn.sudo(
         #                'echo "sed -i \'14s/:",/:\\/home\\/datalab-user\\/caffe\\/python:\\/home\\/datalab-user\\/pytorch\\/build:",/\' /tmp/pyspark_local_template.json" >> {}'.format(
         #                    spark_script))
-#            sudo('echo \'sed -i "s|SP_VER|{}|g" /tmp/pyspark_local_template.json\' >> {}'.format(os.environ['notebook_spark_version'], spark_script))
-#            sudo(
+#            conn.sudo('echo \'sed -i "s|SP_VER|{}|g" /tmp/pyspark_local_template.json\' >> {}'.format(os.environ['notebook_spark_version'], spark_script))
+#            conn.sudo(
 #                'echo "PYJ=\`find /opt/spark/ -name \'*py4j*.zip\' | tr \'\\n\' \':\' | sed \'s|:$||g\'\`; sed -i \'s|PY4J|\'$PYJ\'|g\' /tmp/py3spark_local_template.json" >> {}'.format(
 #                spark_script))
-        #            sudo(
+        #            conn.sudo(
         #                'echo "sed -i \'14s/:",/:\\/home\\/datalab-user\\/caffe\\/python:\\/home\\/datalab-user\\/pytorch\\/build:",/\' /tmp/py3spark_local_template.json" >> {}'.format(
         #                    spark_script))
-#            sudo('echo \'sed -i "s|SP_VER|{}|g" /tmp/py3spark_local_template.json\' >> {}'.format(os.environ['notebook_spark_version'], spark_script))
-#            sudo('echo "cp /tmp/pyspark_local_template.json /home/{}/.local/share/jupyter/kernels/pyspark_local/kernel.json" >> {}'.format(os_user, spark_script))
-#            sudo(
+#            conn.sudo('echo \'sed -i "s|SP_VER|{}|g" /tmp/py3spark_local_template.json\' >> {}'.format(os.environ['notebook_spark_version'], spark_script))
+#            conn.sudo('echo "cp /tmp/pyspark_local_template.json /home/{}/.local/share/jupyter/kernels/pyspark_local/kernel.json" >> {}'.format(os_user, spark_script))
+#            conn.sudo(
 #                'echo "cp /tmp/py3spark_local_template.json /home/{}/.local/share/jupyter/kernels/py3spark_local/kernel.json" >> {}'.format(
 #                    os_user, spark_script))
-#            sudo('git clone https://github.com/legion-platform/legion.git')
-#            sudo('cp {}sdk/Pipfile {}sdk_Pipfile'.format(legion_dir, jupyterlab_dir))
-#            sudo('cp {}sdk/Pipfile.lock {}sdk_Pipfile.lock'.format(legion_dir, jupyterlab_dir))
-#            sudo('cp {}toolchains/python/Pipfile {}toolchains_Pipfile'.format(legion_dir, jupyterlab_dir))
-#            sudo('cp {}toolchains/python/Pipfile.lock {}toolchains_Pipfile.lock'.format(legion_dir, jupyterlab_dir))
-#            sudo('cp {}cli/Pipfile {}cli_Pipfile'.format(legion_dir, jupyterlab_dir))
-#            sudo('cp {}cli/Pipfile.lock {}cli_Pipfile.lock'.format(legion_dir, jupyterlab_dir))
-#            sudo('cp -r {}sdk {}sdk'.format(legion_dir, jupyterlab_dir))
-#            sudo('cp -r {}toolchains/python {}toolchains_python'.format(legion_dir, jupyterlab_dir))
-#            sudo('cp -r {}cli {}cli'.format(legion_dir, jupyterlab_dir))
+#            conn.sudo('git clone https://github.com/legion-platform/legion.git')
+#            conn.sudo('cp {}sdk/Pipfile {}sdk_Pipfile'.format(legion_dir, jupyterlab_dir))
+#            conn.sudo('cp {}sdk/Pipfile.lock {}sdk_Pipfile.lock'.format(legion_dir, jupyterlab_dir))
+#            conn.sudo('cp {}toolchains/python/Pipfile {}toolchains_Pipfile'.format(legion_dir, jupyterlab_dir))
+#            conn.sudo('cp {}toolchains/python/Pipfile.lock {}toolchains_Pipfile.lock'.format(legion_dir, jupyterlab_dir))
+#            conn.sudo('cp {}cli/Pipfile {}cli_Pipfile'.format(legion_dir, jupyterlab_dir))
+#            conn.sudo('cp {}cli/Pipfile.lock {}cli_Pipfile.lock'.format(legion_dir, jupyterlab_dir))
+#            conn.sudo('cp -r {}sdk {}sdk'.format(legion_dir, jupyterlab_dir))
+#            conn.sudo('cp -r {}toolchains/python {}toolchains_python'.format(legion_dir, jupyterlab_dir))
+#            conn.sudo('cp -r {}cli {}cli'.format(legion_dir, jupyterlab_dir))
         except:
            sys.exit(1)
     else:
         try:
-            sudo(
+            conn.sudo(
                 'sed -i "s/c.NotebookApp.base_url =.*/c.NotebookApp.base_url = \'\/{0}\/\'/" {1}'.format(
                     exploratory_name, jupyter_conf_file))
         except Exception as err:
@@ -343,15 +343,15 @@ def ensure_jupyterlab_files(os_user, jupyterlab_dir, jupyterlab_image, jupyter_c
 def ensure_pyspark_local_kernel(os_user, pyspark_local_path_dir, templates_dir, spark_version):
     if not exists('/home/' + os_user + '/.ensure_dir/pyspark_local_kernel_ensured'):
         try:
-            sudo('mkdir -p ' + pyspark_local_path_dir)
-            sudo('touch ' + pyspark_local_path_dir + 'kernel.json')
-            put(templates_dir + 'pyspark_local_template.json', '/tmp/pyspark_local_template.json')
-            sudo(
+            conn.sudo('mkdir -p ' + pyspark_local_path_dir)
+            conn.sudo('touch ' + pyspark_local_path_dir + 'kernel.json')
+            conn.put(templates_dir + 'pyspark_local_template.json', '/tmp/pyspark_local_template.json')
+            conn.sudo(
                 "PYJ=`find /opt/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; sed -i 's|PY4J|'$PYJ'|g' /tmp/pyspark_local_template.json")
-            sudo('sed -i "s|SP_VER|' + spark_version + '|g" /tmp/pyspark_local_template.json')
-            sudo('sed -i \'/PYTHONPATH\"\:/s|\(.*\)"|\\1/home/{0}/caffe/python:/home/{0}/pytorch/build:"|\' /tmp/pyspark_local_template.json'.format(os_user))
-            sudo('\cp /tmp/pyspark_local_template.json ' + pyspark_local_path_dir + 'kernel.json')
-            sudo('touch /home/' + os_user + '/.ensure_dir/pyspark_local_kernel_ensured')
+            conn.sudo('sed -i "s|SP_VER|' + spark_version + '|g" /tmp/pyspark_local_template.json')
+            conn.sudo('sed -i \'/PYTHONPATH\"\:/s|\(.*\)"|\\1/home/{0}/caffe/python:/home/{0}/pytorch/build:"|\' /tmp/pyspark_local_template.json'.format(os_user))
+            conn.sudo('\cp /tmp/pyspark_local_template.json ' + pyspark_local_path_dir + 'kernel.json')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/pyspark_local_kernel_ensured')
         except:
             sys.exit(1)
 
@@ -359,15 +359,15 @@ def ensure_pyspark_local_kernel(os_user, pyspark_local_path_dir, templates_dir,
 def ensure_py3spark_local_kernel(os_user, py3spark_local_path_dir, templates_dir, spark_version):
     if not exists('/home/' + os_user + '/.ensure_dir/py3spark_local_kernel_ensured'):
         try:
-            sudo('mkdir -p ' + py3spark_local_path_dir)
-            sudo('touch ' + py3spark_local_path_dir + 'kernel.json')
-            put(templates_dir + 'py3spark_local_template.json', '/tmp/py3spark_local_template.json')
-            sudo(
+            conn.sudo('mkdir -p ' + py3spark_local_path_dir)
+            conn.sudo('touch ' + py3spark_local_path_dir + 'kernel.json')
+            conn.put(templates_dir + 'py3spark_local_template.json', '/tmp/py3spark_local_template.json')
+            conn.sudo(
                 "PYJ=`find /opt/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; sed -i 's|PY4J|'$PYJ'|g' /tmp/py3spark_local_template.json")
-            sudo('sed -i "s|SP_VER|' + spark_version + '|g" /tmp/py3spark_local_template.json')
-            sudo('sed -i \'/PYTHONPATH\"\:/s|\(.*\)"|\\1/home/{0}/caffe/python:/home/{0}/pytorch/build:"|\' /tmp/py3spark_local_template.json'.format(os_user))
-            sudo('\cp /tmp/py3spark_local_template.json ' + py3spark_local_path_dir + 'kernel.json')
-            sudo('touch /home/' + os_user + '/.ensure_dir/py3spark_local_kernel_ensured')
+            conn.sudo('sed -i "s|SP_VER|' + spark_version + '|g" /tmp/py3spark_local_template.json')
+            conn.sudo('sed -i \'/PYTHONPATH\"\:/s|\(.*\)"|\\1/home/{0}/caffe/python:/home/{0}/pytorch/build:"|\' /tmp/py3spark_local_template.json'.format(os_user))
+            conn.sudo('\cp /tmp/py3spark_local_template.json ' + py3spark_local_path_dir + 'kernel.json')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/py3spark_local_kernel_ensured')
         except:
             sys.exit(1)
 
@@ -421,14 +421,14 @@ def pyspark_kernel(kernels_dir, dataengine_service_version, cluster_name, spark_
 
 def ensure_ciphers():
     try:
-        sudo('echo -e "\nKexAlgorithms curve25519-sha256@libssh.org,diffie-hellman-group-exchange-sha256" >> /etc/ssh/sshd_config')
-        sudo('echo -e "Ciphers aes256-gcm@openssh.com,aes128-gcm@openssh.com,chacha20-poly1305@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr" >> /etc/ssh/sshd_config')
-        sudo('echo -e "\tKexAlgorithms curve25519-sha256@libssh.org,diffie-hellman-group-exchange-sha256" >> /etc/ssh/ssh_config')
-        sudo('echo -e "\tCiphers aes256-gcm@openssh.com,aes128-gcm@openssh.com,chacha20-poly1305@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr" >> /etc/ssh/ssh_config')
+        conn.sudo('echo -e "\nKexAlgorithms curve25519-sha256@libssh.org,diffie-hellman-group-exchange-sha256" >> /etc/ssh/sshd_config')
+        conn.sudo('echo -e "Ciphers aes256-gcm@openssh.com,aes128-gcm@openssh.com,chacha20-poly1305@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr" >> /etc/ssh/sshd_config')
+        conn.sudo('echo -e "\tKexAlgorithms curve25519-sha256@libssh.org,diffie-hellman-group-exchange-sha256" >> /etc/ssh/ssh_config')
+        conn.sudo('echo -e "\tCiphers aes256-gcm@openssh.com,aes128-gcm@openssh.com,chacha20-poly1305@openssh.com,aes256-ctr,aes192-ctr,aes128-ctr" >> /etc/ssh/ssh_config')
         try:
-            sudo('service ssh reload')
+            conn.sudo('service ssh reload')
         except:
-            sudo('service sshd reload')
+            conn.sudo('service sshd reload')
     except Exception as err:
         traceback.print_exc()
         print('Failed to ensure ciphers: ', str(err))
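
    # The install_r_pkg hunks that follow, like the install_pip_pkg hunks earlier in the
    # file, keep the existing log-capture idiom: full command output is tee'd to
    # /tmp/tee.tmp, lines matching known error patterns are grep'ed into a per-package
    # log, and that log is read back to decide the install status. A compressed sketch of
    # the idiom; the package name, error patterns and connection details are placeholders.
    from fabric import Connection

    conn = Connection(host='10.0.0.10', user='datalab-user',
                      connect_kwargs={'key_filename': '/root/keys/KEY.pem'})
    name = 'ggplot2'
    error_parser = 'ERROR|error:|Cannot|failed'
    # 1) run the install, tee full output, keep only lines matching the error patterns
    conn.sudo('R -e \'install.packages("{0}", repos="https://cloud.r-project.org")\' 2>&1 | '
              'tee /tmp/tee.tmp; if ! grep -w -E "({1})" /tmp/tee.tmp > /tmp/install_{0}.log; '
              'then echo "" > /tmp/install_{0}.log; fi'.format(name, error_parser))
    # 2) read the filtered log back; a non-empty result marks an installation error
    err = conn.sudo('cat /tmp/install_{0}.log'.format(name)).stdout.replace('"', "'")
    status = 'installation_error' if err.strip() else 'installed'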
@@ -447,13 +447,13 @@ def install_r_pkg(requisites):
             else:
                 vers = '"{}"'.format(vers)
             if name == 'sparklyr':
-                run('sudo -i R -e \'devtools::install_version("{0}", version = {1}, repos = "http://cran.us.r-project.org", dependencies = NA)\' 2>&1 | '
+                conn.run('sudo -i R -e \'devtools::install_version("{0}", version = {1}, repos = "http://cran.us.r-project.org", dependencies = NA)\' 2>&1 | '
                         'tee /tmp/tee.tmp; if ! grep -w -E  "({2})" /tmp/tee.tmp > /tmp/install_{0}.log; then  echo "" > /tmp/install_{0}.log;fi'.format(name, vers, error_parser))
             else:
-                sudo('R -e \'devtools::install_version("{0}", version = {1}, repos = "https://cloud.r-project.org", dependencies = NA)\' 2>&1 | '
+                conn.sudo('R -e \'devtools::install_version("{0}", version = {1}, repos = "https://cloud.r-project.org", dependencies = NA)\' 2>&1 | '
                          'tee /tmp/tee.tmp; if ! grep -w -E  "({2})" /tmp/tee.tmp > /tmp/install_{0}.log; then  echo "" > /tmp/install_{0}.log;fi'.format(name, vers, error_parser))
-            dep = sudo('grep "(NA.*->". /tmp/tee.tmp | awk \'{print $1}\'').replace('\r\n', ' ')
-            dep_ver = sudo('grep "(NA.*->". /tmp/tee.tmp | awk \'{print $4}\'').replace('\r\n', ' ').replace(')', '').split(' ')
+            dep = conn.sudo('grep "(NA.*->". /tmp/tee.tmp | awk \'{print $1}\'').replace('\r\n', ' ')
+            dep_ver = conn.sudo('grep "(NA.*->". /tmp/tee.tmp | awk \'{print $4}\'').replace('\r\n', ' ').replace(')', '').split(' ')
             if dep == '':
                 dep = []
             else:
@@ -464,9 +464,9 @@ def install_r_pkg(requisites):
                     else:
                         dep[n] = '{} v.{}'.format(dep[n], dep_ver[n])
                 dep = [i for i in dep if i]
-            err = sudo('cat /tmp/install_{0}.log'.format(name)).replace('"', "'")
-            sudo('R -e \'installed.packages()[,c(3:4)]\' | if ! grep -w {0} > /tmp/install_{0}.list; then  echo "" > /tmp/install_{0}.list;fi'.format(name))
-            res = sudo('cat /tmp/install_{0}.list'.format(name))
+            err = conn.sudo('cat /tmp/install_{0}.log'.format(name)).replace('"', "'")
+            conn.sudo('R -e \'installed.packages()[,c(3:4)]\' | if ! grep -w {0} > /tmp/install_{0}.list; then  echo "" > /tmp/install_{0}.list;fi'.format(name))
+            res = conn.sudo('cat /tmp/install_{0}.list'.format(name))
             if err:
                 status_msg = 'installation_error'
                 if 'couldn\'t find package \'{}\''.format(name) in err:
@@ -476,8 +476,8 @@ def install_r_pkg(requisites):
                 version = ansi_escape.sub('', res).split("\r\n")[0].split('"')[1]
                 status_msg = 'installed'
             if 'Error in download_version_url(package, version, repos, type) :' in err or 'Error in parse_deps(paste(spec,' in err:
-                sudo('R -e \'install.packages("versions", repos="https://cloud.r-project.org", dep=TRUE)\'')
-                versions = sudo('R -e \'library(versions); available.versions("' + name + '")\' 2>&1 | grep -A 50 '
+                conn.sudo('R -e \'install.packages("versions", repos="https://cloud.r-project.org", dep=TRUE)\'')
+                versions = conn.sudo('R -e \'library(versions); available.versions("' + name + '")\' 2>&1 | grep -A 50 '
                                     '\'date available\' | awk \'{print $2}\'').replace('\r\n', ' ')[5:].split(' ')
                 if versions != ['']:
                     status_msg = 'invalid_version'
@@ -498,18 +498,18 @@ def install_r_pkg(requisites):
 
 def update_spark_jars(jars_dir='/opt/jars'):
     try:
-        configs = sudo('find /opt/ /etc/ /usr/lib/ -name spark-defaults.conf -type f').split('\r\n')
+        configs = conn.sudo('find /opt/ /etc/ /usr/lib/ -name spark-defaults.conf -type f').split('\r\n')
         if exists(jars_dir):
             for conf in filter(None, configs):
                 des_path = ''
-                all_jars = sudo('find {0} -name "*.jar"'.format(jars_dir)).split('\r\n')
+                all_jars = conn.sudo('find {0} -name "*.jar"'.format(jars_dir)).split('\r\n')
                 if ('-des-' in conf):
                     des_path = '/'.join(conf.split('/')[:3])
                     all_jars = find_des_jars(all_jars, des_path)
-                sudo('''sed -i '/^# Generated\|^spark.jars/d' {0}'''.format(conf))
-                sudo('echo "# Generated spark.jars by DataLab from {0}\nspark.jars {1}" >> {2}'
+                conn.sudo('''sed -i '/^# Generated\|^spark.jars/d' {0}'''.format(conf))
+                conn.sudo('echo "# Generated spark.jars by DataLab from {0}\nspark.jars {1}" >> {2}'
                      .format(','.join(filter(None, [jars_dir, des_path])), ','.join(all_jars), conf))
-                # sudo("sed -i 's/^[[:space:]]*//' {0}".format(conf))
+                # conn.sudo("sed -i 's/^[[:space:]]*//' {0}".format(conf))
         else:
             print("Can't find directory {0} with jar files".format(jars_dir))
     except Exception as err:
@@ -527,28 +527,28 @@ def install_java_pkg(requisites):
     ivy_settings = 'ivysettings.xml'
     dest_dir = '/opt/jars/java'
     try:
-        ivy_jar = sudo('find /opt /usr -name "*ivy-{0}.jar" | head -n 1'.format(os.environ['notebook_ivy_version']))
-        sudo('mkdir -p {0} {1}'.format(ivy_dir, dest_dir))
-        put('{0}{1}'.format(templates_dir, ivy_settings), '{0}/{1}'.format(ivy_dir, ivy_settings), use_sudo=True)
-        proxy_string = sudo('cat /etc/profile | grep http_proxy | cut -f2 -d"="')
+        ivy_jar = conn.sudo('find /opt /usr -name "*ivy-{0}.jar" | head -n 1'.format(os.environ['notebook_ivy_version']))
+        conn.sudo('mkdir -p {0} {1}'.format(ivy_dir, dest_dir))
+        conn.put('{0}{1}'.format(templates_dir, ivy_settings), '{0}/{1}'.format(ivy_dir, ivy_settings), use_sudo=True)
+        proxy_string = conn.sudo('cat /etc/profile | grep http_proxy | cut -f2 -d"="')
         proxy_re = '(?P<proto>http.*)://(?P<host>[^:/ ]+):(?P<port>[0-9]*)'
         proxy_find = re.search(proxy_re, proxy_string)
         java_proxy = "export _JAVA_OPTIONS='-Dhttp.proxyHost={0} -Dhttp.proxyPort={1} \
             -Dhttps.proxyHost={0} -Dhttps.proxyPort={1}'".format(proxy_find.group('host'), proxy_find.group('port'))
         for java_pkg in requisites:
-            sudo('rm -rf {0}'.format(ivy_cache_dir))
-            sudo('mkdir -p {0}'.format(ivy_cache_dir))
+            conn.sudo('rm -rf {0}'.format(ivy_cache_dir))
+            conn.sudo('mkdir -p {0}'.format(ivy_cache_dir))
             group, artifact, version, override = java_pkg
             print("Installing package (override: {3}): {0}:{1}:{2}".format(group, artifact, version, override))
-            sudo('{8}; java -jar {0} -settings {1}/{2} -cache {3} -dependency {4} {5} {6} 2>&1 | tee /tmp/tee.tmp; \
+            conn.sudo('{8}; java -jar {0} -settings {1}/{2} -cache {3} -dependency {4} {5} {6} 2>&1 | tee /tmp/tee.tmp; \
                 if ! grep -w -E  "({7})" /tmp/tee.tmp > /tmp/install_{5}.log; then echo "" > /tmp/install_{5}.log;fi'
                  .format(ivy_jar, ivy_dir, ivy_settings, ivy_cache_dir, group, artifact, version, error_parser, java_proxy))
-            err = sudo('cat /tmp/install_{0}.log'.format(artifact)).replace('"', "'").strip()
-            sudo('find {0} -name "{1}*.jar" | head -n 1 | rev | cut -f1 -d "/" | rev | \
+            err = conn.sudo('cat /tmp/install_{0}.log'.format(artifact)).replace('"', "'").strip()
+            conn.sudo('find {0} -name "{1}*.jar" | head -n 1 | rev | cut -f1 -d "/" | rev | \
                 if ! grep -w -i {1} > /tmp/install_{1}.list; then echo "" > /tmp/install_{1}.list;fi'.format(ivy_cache_dir, artifact))
-            res = sudo('cat /tmp/install_{0}.list'.format(artifact))
+            res = conn.sudo('cat /tmp/install_{0}.list'.format(artifact))
             if res:
-                sudo('cp -f $(find {0} -name "*.jar" | xargs) {1}'.format(ivy_cache_dir, dest_dir))
+                conn.sudo('cp -f $(find {0} -name "*.jar" | xargs) {1}'.format(ivy_cache_dir, dest_dir))
                 status.append({"group": "java", "name": "{0}:{1}".format(group, artifact), "version": version, "status": "installed"})
             else:
                 status.append({"group": "java", "name": "{0}:{1}".format(group, artifact), "status": "installation_error", "error_message": err})
@@ -565,7 +565,7 @@ def install_java_pkg(requisites):
 def get_available_r_pkgs():
     try:
         r_pkgs = dict()
-        sudo('R -e \'write.table(available.packages(contriburl="https://cloud.r-project.org/src/contrib"), file="/tmp/r.csv", row.names=F, col.names=F, sep=",")\'')
+        conn.sudo('R -e \'write.table(available.packages(contriburl="https://cloud.r-project.org/src/contrib"), file="/tmp/r.csv", row.names=F, col.names=F, sep=",")\'')
         get("/tmp/r.csv", "r.csv")
         with open('r.csv', 'r') as csvfile:
             reader = csv.reader(csvfile, delimiter=',')
@@ -580,16 +580,16 @@ def get_available_r_pkgs():
 def ensure_toree_local_kernel(os_user, toree_link, scala_kernel_path, files_dir, scala_version, spark_version):
     if not exists('/home/' + os_user + '/.ensure_dir/toree_local_kernel_ensured'):
         try:
-            sudo('pip install ' + toree_link + ' --no-cache-dir')
-            sudo('ln -s /opt/spark/ /usr/local/spark')
-            sudo('jupyter toree install')
-            sudo('mv ' + scala_kernel_path + 'lib/* /tmp/')
-            put(files_dir + 'toree-assembly-0.3.0.jar', '/tmp/toree-assembly-0.3.0.jar')
-            sudo('mv /tmp/toree-assembly-0.3.0.jar ' + scala_kernel_path + 'lib/')
-            sudo(
+            conn.sudo('pip install ' + toree_link + ' --no-cache-dir')
+            conn.sudo('ln -s /opt/spark/ /usr/local/spark')
+            conn.sudo('jupyter toree install')
+            conn.sudo('mv ' + scala_kernel_path + 'lib/* /tmp/')
+            conn.put(files_dir + 'toree-assembly-0.3.0.jar', '/tmp/toree-assembly-0.3.0.jar')
+            conn.sudo('mv /tmp/toree-assembly-0.3.0.jar ' + scala_kernel_path + 'lib/')
+            conn.sudo(
                 'sed -i "s|Apache Toree - Scala|Local Apache Toree - Scala (Scala-' + scala_version +
                 ', Spark-' + spark_version + ')|g" ' + scala_kernel_path + 'kernel.json')
-            sudo('touch /home/' + os_user + '/.ensure_dir/toree_local_kernel_ensured')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/toree_local_kernel_ensured')
         except:
             sys.exit(1)
 
@@ -598,74 +598,74 @@ def install_ungit(os_user, notebook_name, edge_ip):
     if not exists('/home/{}/.ensure_dir/ungit_ensured'.format(os_user)):
         try:
             manage_npm_pkg('-g install ungit@{}'.format(os.environ['notebook_ungit_version']))
-            put('/root/templates/ungit.service', '/tmp/ungit.service')
-            sudo("sed -i 's|OS_USR|{}|' /tmp/ungit.service".format(os_user))
-            http_proxy = run('echo $http_proxy')
-            sudo("sed -i 's|PROXY_HOST|{}|g' /tmp/ungit.service".format(http_proxy))
-            sudo("sed -i 's|NOTEBOOK_NAME|{}|' /tmp/ungit.service".format(
+            conn.put('/root/templates/ungit.service', '/tmp/ungit.service')
+            conn.sudo("sed -i 's|OS_USR|{}|' /tmp/ungit.service".format(os_user))
+            http_proxy = conn.run('echo $http_proxy')
+            conn.sudo("sed -i 's|PROXY_HOST|{}|g' /tmp/ungit.service".format(http_proxy))
+            conn.sudo("sed -i 's|NOTEBOOK_NAME|{}|' /tmp/ungit.service".format(
                 notebook_name))
-            sudo("mv -f /tmp/ungit.service /etc/systemd/system/ungit.service")
-            run('git config --global user.name "Example User"')
-            run('git config --global user.email "example@example.com"')
-            run('mkdir -p ~/.git/templates/hooks')
-            put('/root/scripts/git_pre_commit.py', '~/.git/templates/hooks/pre-commit', mode=0o755)
-            run('git config --global init.templatedir ~/.git/templates')
-            run('touch ~/.gitignore')
-            run('git config --global core.excludesfile ~/.gitignore')
-            run('echo ".ipynb_checkpoints/" >> ~/.gitignore')
-            run('echo "spark-warehouse/" >> ~/.gitignore')
-            run('echo "metastore_db/" >> ~/.gitignore')
-            run('echo "derby.log" >> ~/.gitignore')
-            sudo(
+            conn.sudo("mv -f /tmp/ungit.service /etc/systemd/system/ungit.service")
+            conn.run('git config --global user.name "Example User"')
+            conn.run('git config --global user.email "example@example.com"')
+            conn.run('mkdir -p ~/.git/templates/hooks')
+            conn.put('/root/scripts/git_pre_commit.py', '~/.git/templates/hooks/pre-commit', mode=0o755)
+            conn.run('git config --global init.templatedir ~/.git/templates')
+            conn.run('touch ~/.gitignore')
+            conn.run('git config --global core.excludesfile ~/.gitignore')
+            conn.run('echo ".ipynb_checkpoints/" >> ~/.gitignore')
+            conn.run('echo "spark-warehouse/" >> ~/.gitignore')
+            conn.run('echo "metastore_db/" >> ~/.gitignore')
+            conn.run('echo "derby.log" >> ~/.gitignore')
+            conn.sudo(
                 'echo -e "Host git.epam.com\n   HostName git.epam.com\n   ProxyCommand nc -X connect -x {}:3128 %h %p\n" > /home/{}/.ssh/config'.format(
                     edge_ip, os_user))
-            sudo(
+            conn.sudo(
                 'echo -e "Host github.com\n   HostName github.com\n   ProxyCommand nc -X connect -x {}:3128 %h %p" >> /home/{}/.ssh/config'.format(
                     edge_ip, os_user))
-            sudo(
+            conn.sudo(
                 'echo -e "Host gitlab.com\n   HostName gitlab.com\n   ProxyCommand nc -X connect -x {}:3128 %h %p" >> /home/{}/.ssh/config'.format(
                     edge_ip, os_user))
-            sudo('systemctl daemon-reload')
-            sudo('systemctl enable ungit.service')
-            sudo('systemctl start ungit.service')
-            sudo('touch /home/{}/.ensure_dir/ungit_ensured'.format(os_user))
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo('systemctl enable ungit.service')
+            conn.sudo('systemctl start ungit.service')
+            conn.sudo('touch /home/{}/.ensure_dir/ungit_ensured'.format(os_user))
         except:
             sys.exit(1)
     else:
         try:
-            sudo("sed -i 's|--rootPath=/.*-ungit|--rootPath=/{}-ungit|' /etc/systemd/system/ungit.service".format(
+            conn.sudo("sed -i 's|--rootPath=/.*-ungit|--rootPath=/{}-ungit|' /etc/systemd/system/ungit.service".format(
                 notebook_name))
-            http_proxy = run('echo $http_proxy')
-            sudo("sed -i 's|HTTPS_PROXY=.*3128|HTTPS_PROXY={}|g' /etc/systemd/system/ungit.service".format(http_proxy))
-            sudo("sed -i 's|HTTP_PROXY=.*3128|HTTP_PROXY={}|g' /etc/systemd/system/ungit.service".format(http_proxy))
-            sudo('systemctl daemon-reload')
-            sudo('systemctl restart ungit.service')
+            http_proxy = conn.run('echo $http_proxy')
+            conn.sudo("sed -i 's|HTTPS_PROXY=.*3128|HTTPS_PROXY={}|g' /etc/systemd/system/ungit.service".format(http_proxy))
+            conn.sudo("sed -i 's|HTTP_PROXY=.*3128|HTTP_PROXY={}|g' /etc/systemd/system/ungit.service".format(http_proxy))
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo('systemctl restart ungit.service')
         except:
             sys.exit(1)
-    run('git config --global http.proxy $http_proxy')
-    run('git config --global https.proxy $https_proxy')
+    conn.run('git config --global http.proxy $http_proxy')
+    conn.run('git config --global https.proxy $https_proxy')
 
 
 def install_inactivity_checker(os_user, ip_address, rstudio=False):
     if not exists('/home/{}/.ensure_dir/inactivity_ensured'.format(os_user)):
         try:
             if not exists('/opt/inactivity'):
-                sudo('mkdir /opt/inactivity')
-            put('/root/templates/inactive.service', '/etc/systemd/system/inactive.service', use_sudo=True)
-            put('/root/templates/inactive.timer', '/etc/systemd/system/inactive.timer', use_sudo=True)
+                conn.sudo('mkdir /opt/inactivity')
+            conn.put('/root/templates/inactive.service', '/etc/systemd/system/inactive.service', use_sudo=True)
+            conn.put('/root/templates/inactive.timer', '/etc/systemd/system/inactive.timer', use_sudo=True)
             if rstudio:
-                put('/root/templates/inactive_rs.sh', '/opt/inactivity/inactive.sh', use_sudo=True)
+                conn.put('/root/templates/inactive_rs.sh', '/opt/inactivity/inactive.sh', use_sudo=True)
             else:
-                put('/root/templates/inactive.sh', '/opt/inactivity/inactive.sh', use_sudo=True)
-            sudo("sed -i 's|IP_ADRESS|{}|g' /opt/inactivity/inactive.sh".format(ip_address))
-            sudo("chmod 755 /opt/inactivity/inactive.sh")
-            sudo("chown root:root /etc/systemd/system/inactive.service")
-            sudo("chown root:root /etc/systemd/system/inactive.timer")
-            sudo("date +%s > /opt/inactivity/local_inactivity")
-            sudo('systemctl daemon-reload')
-            sudo('systemctl enable inactive.timer')
-            sudo('systemctl start inactive.timer')
-            sudo('touch /home/{}/.ensure_dir/inactive_ensured'.format(os_user))
+                conn.put('/root/templates/inactive.sh', '/opt/inactivity/inactive.sh', use_sudo=True)
+            conn.sudo("sed -i 's|IP_ADRESS|{}|g' /opt/inactivity/inactive.sh".format(ip_address))
+            conn.sudo("chmod 755 /opt/inactivity/inactive.sh")
+            conn.sudo("chown root:root /etc/systemd/system/inactive.service")
+            conn.sudo("chown root:root /etc/systemd/system/inactive.timer")
+            conn.sudo("date +%s > /opt/inactivity/local_inactivity")
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo('systemctl enable inactive.timer')
+            conn.sudo('systemctl start inactive.timer')
+            conn.sudo('touch /home/{}/.ensure_dir/inactive_ensured'.format(os_user))
         except Exception as err:
             print('Failed to setup inactivity check service!', str(err))
             sys.exit(1)
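
    # Elsewhere in this file (e.g. the jupyter-notebook.service and ungit.service handling
    # above), root-owned files are staged through /tmp with conn.put() and then moved into
    # place with conn.sudo(). A minimal sketch of that upload-then-move pattern with
    # hypothetical host, key and service names (not part of this commit).
    from fabric import Connection

    conn = Connection(host='10.0.0.10', user='datalab-user',
                      connect_kwargs={'key_filename': '/root/keys/KEY.pem'})
    conn.put('/root/templates/example.service', '/tmp/example.service')
    conn.sudo('mv -f /tmp/example.service /etc/systemd/system/example.service')
    conn.sudo('chown root:root /etc/systemd/system/example.service')
    conn.sudo('systemctl daemon-reload')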
@@ -673,8 +673,8 @@ def install_inactivity_checker(os_user, ip_address, rstudio=False):
 
 def set_git_proxy(os_user, hostname, keyfile, proxy_host):
     init_datalab_connection(hostname, os_user, keyfile)
-    run('git config --global http.proxy {}'.format(proxy_host))
-    run('git config --global https.proxy {}'.format(proxy_host))
+    conn.run('git config --global http.proxy {}'.format(proxy_host))
+    conn.run('git config --global https.proxy {}'.format(proxy_host))
     close_connection()
 
 
@@ -685,13 +685,13 @@ def set_mongo_parameters(client, mongo_parameters):
 
 def install_r_packages(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/r_packages_ensured'):
-        sudo('R -e "install.packages(\'devtools\', repos = \'https://cloud.r-project.org\')"')
-        sudo('R -e "install.packages(\'knitr\', repos = \'https://cloud.r-project.org\')"')
-        sudo('R -e "install.packages(\'ggplot2\', repos = \'https://cloud.r-project.org\')"')
-        sudo('R -e "install.packages(c(\'devtools\',\'mplot\', \'googleVis\'), '
+        conn.sudo('R -e "install.packages(\'devtools\', repos = \'https://cloud.r-project.org\')"')
+        conn.sudo('R -e "install.packages(\'knitr\', repos = \'https://cloud.r-project.org\')"')
+        conn.sudo('R -e "install.packages(\'ggplot2\', repos = \'https://cloud.r-project.org\')"')
+        conn.sudo('R -e "install.packages(c(\'devtools\',\'mplot\', \'googleVis\'), '
              'repos = \'https://cloud.r-project.org\'); require(devtools); install_github(\'ramnathv/rCharts\')"')
-        sudo('R -e \'install.packages("versions", repos="https://cloud.r-project.org", dep=TRUE)\'')
-        sudo('touch /home/' + os_user + '/.ensure_dir/r_packages_ensured')
+        conn.sudo('R -e \'install.packages("versions", repos="https://cloud.r-project.org", dep=TRUE)\'')
+        conn.sudo('touch /home/' + os_user + '/.ensure_dir/r_packages_ensured')
 
 
 def add_breeze_library_local(os_user):
@@ -699,25 +699,25 @@ def add_breeze_library_local(os_user):
         try:
             breeze_tmp_dir = '/tmp/breeze_tmp_local/'
             jars_dir = '/opt/jars/'
-            sudo('mkdir -p {}'.format(breeze_tmp_dir))
-            sudo('wget https://repo1.maven.org/maven2/org/scalanlp/breeze_{0}/{1}/breeze_{0}-{1}.jar -O \
+            conn.sudo('mkdir -p {}'.format(breeze_tmp_dir))
+            conn.sudo('wget https://repo1.maven.org/maven2/org/scalanlp/breeze_{0}/{1}/breeze_{0}-{1}.jar -O \
                     {2}breeze_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir))
-            sudo('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-natives_{0}/{1}/breeze-natives_{0}-{1}.jar -O \
+            conn.sudo('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-natives_{0}/{1}/breeze-natives_{0}-{1}.jar -O \
                     {2}breeze-natives_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir))
-            sudo('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-viz_{0}/{1}/breeze-viz_{0}-{1}.jar -O \
+            conn.sudo('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-viz_{0}/{1}/breeze-viz_{0}-{1}.jar -O \
                     {2}breeze-viz_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir))
-            sudo('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-macros_{0}/{1}/breeze-macros_{0}-{1}.jar -O \
+            conn.sudo('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-macros_{0}/{1}/breeze-macros_{0}-{1}.jar -O \
                     {2}breeze-macros_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir))
-            sudo('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-parent_{0}/{1}/breeze-parent_{0}-{1}.jar -O \
+            conn.sudo('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-parent_{0}/{1}/breeze-parent_{0}-{1}.jar -O \
                     {2}breeze-parent_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir))
-            sudo('wget https://repo1.maven.org/maven2/org/jfree/jfreechart/{0}/jfreechart-{0}.jar -O \
+            conn.sudo('wget https://repo1.maven.org/maven2/org/jfree/jfreechart/{0}/jfreechart-{0}.jar -O \
                     {1}jfreechart-{0}.jar'.format('1.0.19', breeze_tmp_dir))
-            sudo('wget https://repo1.maven.org/maven2/org/jfree/jcommon/{0}/jcommon-{0}.jar -O \
+            conn.sudo('wget https://repo1.maven.org/maven2/org/jfree/jcommon/{0}/jcommon-{0}.jar -O \
                     {1}jcommon-{0}.jar'.format('1.0.24', breeze_tmp_dir))
-            sudo('wget --no-check-certificate https://brunelvis.org/jar/spark-kernel-brunel-all-{0}.jar -O \
+            conn.sudo('wget --no-check-certificate https://brunelvis.org/jar/spark-kernel-brunel-all-{0}.jar -O \
                     {1}spark-kernel-brunel-all-{0}.jar'.format('2.3', breeze_tmp_dir))
-            sudo('mv {0}* {1}'.format(breeze_tmp_dir, jars_dir))
-            sudo('touch /home/' + os_user + '/.ensure_dir/breeze_local_ensured')
+            conn.sudo('mv {0}* {1}'.format(breeze_tmp_dir, jars_dir))
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/breeze_local_ensured')
         except:
             sys.exit(1)
 
@@ -725,22 +725,22 @@ def add_breeze_library_local(os_user):
 def configure_data_engine_service_pip(hostname, os_user, keyfile, emr=False):
     init_datalab_connection(hostname, os_user, keyfile)
     manage_pkg('-y install', 'remote', 'python3-pip')
-    if not exists('/usr/bin/pip3') and sudo("python3.4 -V 2>/dev/null | awk '{print $2}'"):
-        sudo('ln -s /usr/bin/pip-3.4 /usr/bin/pip3')
-    elif not exists('/usr/bin/pip3') and sudo("python3.5 -V 2>/dev/null | awk '{print $2}'"):
-        sudo('ln -s /usr/bin/pip-3.5 /usr/bin/pip3')
-    elif not exists('/usr/bin/pip3') and sudo("python3.6 -V 2>/dev/null | awk '{print $2}'"):
-        sudo('ln -s /usr/bin/pip-3.6 /usr/bin/pip3')
-    elif not exists('/usr/bin/pip3') and sudo("python3.7 -V 2>/dev/null | awk '{print $2}'"):
-        sudo('ln -s /usr/bin/pip-3.7 /usr/bin/pip3')
-    elif not exists('/usr/bin/pip3') and sudo("python3.8 -V 2>/dev/null | awk '{print $2}'"):
-        sudo('ln -s /usr/bin/pip-3.8 /usr/bin/pip3')
+    if not exists('/usr/bin/pip3') and conn.sudo("python3.4 -V 2>/dev/null | awk '{print $2}'"):
+        conn.sudo('ln -s /usr/bin/pip-3.4 /usr/bin/pip3')
+    elif not exists('/usr/bin/pip3') and conn.sudo("python3.5 -V 2>/dev/null | awk '{print $2}'"):
+        conn.sudo('ln -s /usr/bin/pip-3.5 /usr/bin/pip3')
+    elif not exists('/usr/bin/pip3') and conn.sudo("python3.6 -V 2>/dev/null | awk '{print $2}'"):
+        conn.sudo('ln -s /usr/bin/pip-3.6 /usr/bin/pip3')
+    elif not exists('/usr/bin/pip3') and conn.sudo("python3.7 -V 2>/dev/null | awk '{print $2}'"):
+        conn.sudo('ln -s /usr/bin/pip-3.7 /usr/bin/pip3')
+    elif not exists('/usr/bin/pip3') and conn.sudo("python3.8 -V 2>/dev/null | awk '{print $2}'"):
+        conn.sudo('ln -s /usr/bin/pip-3.8 /usr/bin/pip3')
     if emr:
-        sudo('pip3 install -U pip=={}'.format(os.environ['conf_pip_version']))
-        sudo('ln -s /usr/local/bin/pip3.7 /bin/pip3.7')
-    sudo('echo "export PATH=$PATH:/usr/local/bin" >> /etc/profile')
-    sudo('source /etc/profile')
-    run('source /etc/profile')
+        conn.sudo('pip3 install -U pip=={}'.format(os.environ['conf_pip_version']))
+        conn.sudo('ln -s /usr/local/bin/pip3.7 /bin/pip3.7')
+    conn.sudo('echo "export PATH=$PATH:/usr/local/bin" >> /etc/profile')
+    conn.sudo('source /etc/profile')
+    conn.run('source /etc/profile')
     close_connection()
 
 
@@ -766,7 +766,7 @@ def remove_rstudio_dataengines_kernel(cluster_name, os_user):
         with open('.Rprofile', 'w') as f:
             for line in conf:
                 f.write('{}\n'.format(line))
-        put('.Rprofile', '/home/{}/.Rprofile'.format(os_user))
+        conn.put('.Rprofile', '/home/{}/.Rprofile'.format(os_user))
         get('/home/{}/.Renviron'.format(os_user), 'Renviron')
         data = open('Renviron').read()
         conf = filter(None, data.split('\n'))
@@ -784,11 +784,11 @@ def remove_rstudio_dataengines_kernel(cluster_name, os_user):
         with open('.Renviron', 'w') as f:
             for line in conf:
                 f.write('{}\n'.format(line))
-        put('.Renviron', '/home/{}/.Renviron'.format(os_user))
+        conn.put('.Renviron', '/home/{}/.Renviron'.format(os_user))
         if len(conf) == 1:
-           sudo('rm -f /home/{}/.ensure_dir/rstudio_dataengine_ensured'.format(os_user))
-           sudo('rm -f /home/{}/.ensure_dir/rstudio_dataengine-service_ensured'.format(os_user))
-        sudo('''R -e "source('/home/{}/.Rprofile')"'''.format(os_user))
+           conn.sudo('rm -f /home/{}/.ensure_dir/rstudio_dataengine_ensured'.format(os_user))
+           conn.sudo('rm -f /home/{}/.ensure_dir/rstudio_dataengine-service_ensured'.format(os_user))
+        conn.sudo('''R -e "source('/home/{}/.Rprofile')"'''.format(os_user))
     except:
         sys.exit(1)
 
@@ -796,18 +796,18 @@ def remove_rstudio_dataengines_kernel(cluster_name, os_user):
 def restart_zeppelin(creds=False, os_user='', hostname='', keyfile=''):
     if creds:
         init_datalab_connection(hostname, os_user, keyfile)
-    sudo("systemctl daemon-reload")
-    sudo("systemctl restart zeppelin-notebook")
+    conn.sudo("systemctl daemon-reload")
+    conn.sudo("systemctl restart zeppelin-notebook")
     if creds:
         close_connection()
 
 def get_spark_memory(creds=False, os_user='', hostname='', keyfile=''):
     if creds:
         with settings(host_string='{}@{}'.format(os_user, hostname)):
-            mem = sudo('free -m | grep Mem | tr -s " " ":" | cut -f 2 -d ":"')
+            mem = conn.sudo('free -m | grep Mem | tr -s " " ":" | cut -f 2 -d ":"')
             instance_memory = int(mem)
     else:
-        mem = sudo('free -m | grep Mem | tr -s " " ":" | cut -f 2 -d ":"')
+        mem = conn.sudo('free -m | grep Mem | tr -s " " ":" | cut -f 2 -d ":"')
         instance_memory = int(mem)
     try:
         if instance_memory > int(os.environ['dataengine_expl_instance_memory']):
@@ -844,8 +844,8 @@ def update_pyopenssl_lib(os_user):
     if not exists('/home/{}/.ensure_dir/pyopenssl_updated'.format(os_user)):
         try:
             if exists('/usr/bin/pip3'):
-                sudo('pip3 install -U pyopenssl')
-            sudo('touch /home/{}/.ensure_dir/pyopenssl_updated'.format(os_user))
+                conn.sudo('pip3 install -U pyopenssl')
+            conn.sudo('touch /home/{}/.ensure_dir/pyopenssl_updated'.format(os_user))
         except:
             sys.exit(1)
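
    The marker-file guard used throughout these libraries (check exists() on a file under
    ~/.ensure_dir, run the setup, then conn.sudo('touch ...')) depends on the Fabric 1 helper
    fabric.contrib.files.exists. A hedged sketch of the same pattern using only the Fabric 2
    Connection API; remote_exists and ensure_once are assumed wrapper names, not repository code:

        def remote_exists(conn, path):
            # test -e exits non-zero when the path is missing; warn=True keeps
            # Fabric from raising on that non-zero exit
            return conn.run('test -e {}'.format(path), warn=True).ok

        def ensure_once(conn, marker, commands):
            # run idempotent setup commands once per host, flagged by a marker file
            if remote_exists(conn, marker):
                return
            for cmd in commands:
                conn.sudo(cmd)
            conn.sudo('touch {}'.format(marker))

        # ensure_once(conn, '/home/{}/.ensure_dir/pyopenssl_updated'.format(os_user),
        #             ['pip3 install -U pyopenssl'])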
 
@@ -853,9 +853,9 @@ def update_pyopenssl_lib(os_user):
 def find_cluster_kernels():
     try:
         with settings(sudo_user='root'):
-            de = [i for i in sudo('find /opt/ -maxdepth 1 -name "*-de-*" -type d | rev | '
+            de = [i for i in conn.sudo('find /opt/ -maxdepth 1 -name "*-de-*" -type d | rev | '
                                   'cut -f 1 -d "/" | rev | xargs -r').split(' ') if i != '']
-            des =  [i for i in sudo('find /opt/ -maxdepth 2 -name "*-des-*" -type d | rev | '
+            des =  [i for i in conn.sudo('find /opt/ -maxdepth 2 -name "*-des-*" -type d | rev | '
                                     'cut -f 1,2 -d "/" | rev | xargs -r').split(' ') if i != '']
         return (de, des)
     except:
@@ -899,9 +899,9 @@ def update_zeppelin_interpreters(multiple_clusters, r_enabled, interpreter_mode=
         if interpreter_mode != 'remote':
             with open(local_interpreters_config, 'w') as f:
                 f.write(json.dumps(data, indent=2))
-            put(local_interpreters_config, local_interpreters_config)
-            sudo('cp -f {0} {1}'.format(local_interpreters_config, interpreters_config))
-            sudo('systemctl restart zeppelin-notebook')
+            conn.put(local_interpreters_config, local_interpreters_config)
+            conn.sudo('cp -f {0} {1}'.format(local_interpreters_config, interpreters_config))
+            conn.sudo('systemctl restart zeppelin-notebook')
         else:
             with open(interpreters_config, 'w') as f:
                 f.write(json.dumps(data, indent=2))
@@ -914,8 +914,8 @@ def update_zeppelin_interpreters(multiple_clusters, r_enabled, interpreter_mode=
 def update_hosts_file(os_user):
     try:
         if not exists('/home/{}/.ensure_dir/hosts_file_updated'.format(os_user)):
-            sudo('sed -i "s/^127.0.0.1 localhost/127.0.0.1 localhost localhost.localdomain/g" /etc/hosts')
-            sudo('touch /home/{}/.ensure_dir/hosts_file_updated'.format(os_user))
+            conn.sudo('sed -i "s/^127.0.0.1 localhost/127.0.0.1 localhost localhost.localdomain/g" /etc/hosts')
+            conn.sudo('touch /home/{}/.ensure_dir/hosts_file_updated'.format(os_user))
     except Exception as err:
         print('Failed to update hosts file', str(err))
         sys.exit(1)
@@ -925,11 +925,11 @@ def ensure_docker_compose(os_user):
         configure_docker(os_user)
         if not exists('/home/{}/.ensure_dir/docker_compose_ensured'.format(os_user)):
             docker_compose_version = "1.24.1"
-            sudo('curl -L https://github.com/docker/compose/releases/download/{}/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose'.format(docker_compose_version))
-            sudo('chmod +x /usr/local/bin/docker-compose')
-            sudo('touch /home/{}/.ensure_dir/docker_compose_ensured'.format(os_user))
-        sudo('systemctl daemon-reload')
-        sudo('systemctl restart docker')
+            conn.sudo('curl -L https://github.com/docker/compose/releases/download/{}/docker-compose-`uname -s`-`uname -m` -o /usr/local/bin/docker-compose'.format(docker_compose_version))
+            conn.sudo('chmod +x /usr/local/bin/docker-compose')
+            conn.sudo('touch /home/{}/.ensure_dir/docker_compose_ensured'.format(os_user))
+        conn.sudo('systemctl daemon-reload')
+        conn.sudo('systemctl restart docker')
         return True
     except:
         return False
@@ -939,42 +939,42 @@ def configure_superset(os_user, keycloak_auth_server_url, keycloak_realm_name, k
     try:
         if not exists('/home/{}/incubator-superset'.format(os_user)):
             with cd('/home/{}'.format(os_user)):
-                sudo('wget https://github.com/apache/incubator-superset/archive/{}.tar.gz'.format(
+                conn.sudo('wget https://github.com/apache/incubator-superset/archive/{}.tar.gz'.format(
                     os.environ['notebook_superset_version']))
-                sudo('tar -xzf {}.tar.gz'.format(os.environ['notebook_superset_version']))
-                sudo('ln -sf incubator-superset-{} incubator-superset'.format(os.environ['notebook_superset_version']))
+                conn.sudo('tar -xzf {}.tar.gz'.format(os.environ['notebook_superset_version']))
+                conn.sudo('ln -sf incubator-superset-{} incubator-superset'.format(os.environ['notebook_superset_version']))
         if not exists('/tmp/superset-notebook_installed'):
-            sudo('mkdir -p /opt/datalab/templates')
-            put('/root/templates', '/opt/datalab', use_sudo=True)
-            sudo('sed -i \'s/OS_USER/{}/g\' /opt/datalab/templates/.env'.format(os_user))
+            conn.sudo('mkdir -p /opt/datalab/templates')
+            conn.put('/root/templates', '/opt/datalab', use_sudo=True)
+            conn.sudo('sed -i \'s/OS_USER/{}/g\' /opt/datalab/templates/.env'.format(os_user))
             proxy_string = '{}:3128'.format(edge_instance_private_ip)
-            sudo('sed -i \'s|KEYCLOAK_AUTH_SERVER_URL|{}|g\' /opt/datalab/templates/id_provider.json'.format(
+            conn.sudo('sed -i \'s|KEYCLOAK_AUTH_SERVER_URL|{}|g\' /opt/datalab/templates/id_provider.json'.format(
                 keycloak_auth_server_url))
-            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/id_provider.json'.format(
+            conn.sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/id_provider.json'.format(
                 keycloak_realm_name))
-            sudo('sed -i \'s/CLIENT_ID/{}/g\' /opt/datalab/templates/id_provider.json'.format(keycloak_client_id))
-            sudo('sed -i \'s/CLIENT_SECRET/{}/g\' /opt/datalab/templates/id_provider.json'.format(
+            conn.sudo('sed -i \'s/CLIENT_ID/{}/g\' /opt/datalab/templates/id_provider.json'.format(keycloak_client_id))
+            conn.sudo('sed -i \'s/CLIENT_SECRET/{}/g\' /opt/datalab/templates/id_provider.json'.format(
                 keycloak_client_secret))
-            sudo('sed -i \'s/PROXY_STRING/{}/g\' /opt/datalab/templates/docker-compose.yml'.format(proxy_string))
-            sudo('sed -i \'s|KEYCLOAK_AUTH_SERVER_URL|{}|g\' /opt/datalab/templates/superset_config.py'.format(
+            conn.sudo('sed -i \'s/PROXY_STRING/{}/g\' /opt/datalab/templates/docker-compose.yml'.format(proxy_string))
+            conn.sudo('sed -i \'s|KEYCLOAK_AUTH_SERVER_URL|{}|g\' /opt/datalab/templates/superset_config.py'.format(
                 keycloak_auth_server_url))
-            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/superset_config.py'.format(
+            conn.sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/superset_config.py'.format(
                 keycloak_realm_name))
-            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/superset_config.py'.format(edge_instance_public_ip))
-            sudo('sed -i \'s/SUPERSET_NAME/{}/g\' /opt/datalab/templates/superset_config.py'.format(superset_name))
-            sudo('cp -f /opt/datalab/templates/.env /home/{}/incubator-superset/contrib/docker/'.format(os_user))
-            sudo('cp -f /opt/datalab/templates/docker-compose.yml /home/{}/incubator-superset/contrib/docker/'.format(
+            conn.sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/superset_config.py'.format(edge_instance_public_ip))
+            conn.sudo('sed -i \'s/SUPERSET_NAME/{}/g\' /opt/datalab/templates/superset_config.py'.format(superset_name))
+            conn.sudo('cp -f /opt/datalab/templates/.env /home/{}/incubator-superset/contrib/docker/'.format(os_user))
+            conn.sudo('cp -f /opt/datalab/templates/docker-compose.yml /home/{}/incubator-superset/contrib/docker/'.format(
                 os_user))
-            sudo('cp -f /opt/datalab/templates/id_provider.json /home/{}/incubator-superset/contrib/docker/'.format(
+            conn.sudo('cp -f /opt/datalab/templates/id_provider.json /home/{}/incubator-superset/contrib/docker/'.format(
                 os_user))
-            sudo(
+            conn.sudo(
                 'cp -f /opt/datalab/templates/requirements-extra.txt /home/{}/incubator-superset/contrib/docker/'.format(
                     os_user))
-            sudo('cp -f /opt/datalab/templates/superset_config.py /home/{}/incubator-superset/contrib/docker/'.format(
+            conn.sudo('cp -f /opt/datalab/templates/superset_config.py /home/{}/incubator-superset/contrib/docker/'.format(
                 os_user))
-            sudo('cp -f /opt/datalab/templates/docker-init.sh /home/{}/incubator-superset/contrib/docker/'.format(
+            conn.sudo('cp -f /opt/datalab/templates/docker-init.sh /home/{}/incubator-superset/contrib/docker/'.format(
                 os_user))
-            sudo('touch /tmp/superset-notebook_installed')
+            conn.sudo('touch /tmp/superset-notebook_installed')
     except Exception as err:
         print("Failed configure superset: " + str(err))
         sys.exit(1)
@@ -991,10 +991,10 @@ def manage_npm_pkg(command):
             else:
                 try:
                     if npm_count % 2 == 0:
-                        sudo('npm config set registry {}'.format(npm_registry[0]))
+                        conn.sudo('npm config set registry {}'.format(npm_registry[0]))
                     else:
-                        sudo('npm config set registry {}'.format(npm_registry[1]))
-                    sudo('npm {}'.format(command))
+                        conn.sudo('npm config set registry {}'.format(npm_registry[1]))
+                    conn.sudo('npm {}'.format(command))
                     installed = True
                 except:
                     npm_count += 1
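
    Several migrated call sites use the command output as a value, for example get_spark_memory()
    and find_cluster_kernels() above, and the "pgrep yum" busy probe in the RedHat common_lib
    below. Assuming Fabric 2.x semantics, conn.run()/conn.sudo() return a Result object rather
    than a bare string, so the captured text is normally read from its .stdout attribute; a sketch
    of that pattern (function names are illustrative):

        def instance_memory_mb(conn):
            # hide=True suppresses echoing the remote output while still capturing it
            mem = conn.sudo('free -m | grep Mem | tr -s " " ":" | cut -f 2 -d ":"',
                            hide=True)
            return int(mem.stdout.strip())

        def yum_is_busy(conn):
            result = conn.sudo('pgrep yum -a && echo "busy" || echo "ready"', hide=True)
            return result.stdout.strip() == 'busy'
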
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
index 6343635..70d39e5 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
@@ -37,12 +37,12 @@ def manage_pkg(command, environment, requisites):
             else:
                 print('Package manager is:')
                 if environment == 'remote':
-                    if sudo('pgrep yum -a && echo "busy" || echo "ready"') == 'busy':
+                    if conn.sudo('pgrep yum -a && echo "busy" || echo "ready"') == 'busy':
                         counter += 1
                         time.sleep(10)
                     else:
                         allow = True
-                        sudo('yum {0} {1}'.format(command, requisites))
+                        conn.sudo('yum {0} {1}'.format(command, requisites))
                 elif environment == 'local':
                     if local('sudo pgrep yum -a && echo "busy" || echo "ready"', capture=True) == 'busy':
                         counter += 1
@@ -59,43 +59,43 @@ def ensure_pkg(user, requisites='git vim gcc python-devel openssl-devel nmap lib
     try:
         if not exists('/home/{}/.ensure_dir/pkg_upgraded'.format(user)):
             print("Updating repositories and installing requested tools: {}".format(requisites))
-            if sudo("systemctl list-units  --all | grep firewalld | awk '{print $1}'") != '':
-                sudo('systemctl disable firewalld.service')
-                sudo('systemctl stop firewalld.service')
-            sudo('setenforce 0')
-            sudo("sed -i '/^SELINUX=/s/SELINUX=.*/SELINUX=disabled/g' /etc/selinux/config")
+            if conn.sudo("systemctl list-units  --all | grep firewalld | awk '{print $1}'") != '':
+                conn.sudo('systemctl disable firewalld.service')
+                conn.sudo('systemctl stop firewalld.service')
+            conn.sudo('setenforce 0')
+            conn.sudo("sed -i '/^SELINUX=/s/SELINUX=.*/SELINUX=disabled/g' /etc/selinux/config")
             mirror = 'mirror.centos.org'
             with cd('/etc/yum.repos.d/'):
-                sudo('echo "[Centos-repo]" > centOS-base.repo')
-                sudo('echo "name=Centos 7 Repository" >> centOS-base.repo')
-                sudo('echo "baseurl=http://{}/centos/7/os/x86_64/" >> centOS-base.repo'.format(mirror))
-                sudo('echo "enabled=1" >> centOS-base.repo')
-                sudo('echo "gpgcheck=1" >> centOS-base.repo')
-                sudo('echo "gpgkey=http://{}/centos/7/os/x86_64/RPM-GPG-KEY-CentOS-7" >> centOS-base.repo'.format(mirror))
-            sudo('yum-config-manager --enable rhui-REGION-rhel-server-optional')
+                conn.sudo('echo "[Centos-repo]" > centOS-base.repo')
+                conn.sudo('echo "name=Centos 7 Repository" >> centOS-base.repo')
+                conn.sudo('echo "baseurl=http://{}/centos/7/os/x86_64/" >> centOS-base.repo'.format(mirror))
+                conn.sudo('echo "enabled=1" >> centOS-base.repo')
+                conn.sudo('echo "gpgcheck=1" >> centOS-base.repo')
+                conn.sudo('echo "gpgkey=http://{}/centos/7/os/x86_64/RPM-GPG-KEY-CentOS-7" >> centOS-base.repo'.format(mirror))
+            conn.sudo('yum-config-manager --enable rhui-REGION-rhel-server-optional')
             manage_pkg('update-minimal --security -y', 'remote', '')
             manage_pkg('-y install', 'remote', 'wget')
-            sudo('wget --no-check-certificate https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm')
-            sudo('rpm -ivh epel-release-latest-7.noarch.rpm')
+            conn.sudo('wget --no-check-certificate https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm')
+            conn.sudo('rpm -ivh epel-release-latest-7.noarch.rpm')
             manage_pkg('repolist', 'remote', '')
             manage_pkg('-y install', 'remote', 'python3-pip gcc')
-            sudo('rm -f epel-release-latest-7.noarch.rpm')
-            sudo('export LC_ALL=C')
+            conn.sudo('rm -f epel-release-latest-7.noarch.rpm')
+            conn.sudo('export LC_ALL=C')
             manage_pkg('-y install', 'remote', requisites)
-            sudo('touch /home/{}/.ensure_dir/pkg_upgraded'.format(user))
+            conn.sudo('touch /home/{}/.ensure_dir/pkg_upgraded'.format(user))
     except:
         sys.exit(1)
 
 
 def change_pkg_repos():
     if not exists('/tmp/pkg_china_ensured'):
-        put('/root/files/sources.list', '/tmp/sources.list')
-        sudo('mv /tmp/sources.list  /etc/yum.repos.d/CentOS-Base-aliyun.repo')
-        sudo('touch /tmp/pkg_china_ensured')
+        conn.put('/root/files/sources.list', '/tmp/sources.list')
+        conn.sudo('mv /tmp/sources.list  /etc/yum.repos.d/CentOS-Base-aliyun.repo')
+        conn.sudo('touch /tmp/pkg_china_ensured')
 
 
 def find_java_path_remote():
-    java_path = sudo("alternatives --display java | grep 'slave jre: ' | awk '{print $3}'")
+    java_path = conn.sudo("alternatives --display java | grep 'slave jre: ' | awk '{print $3}'")
     return java_path
 
 
@@ -107,15 +107,15 @@ def find_java_path_local():
 def ensure_ntpd(user, edge_private_ip=''):
     try:
         if not exists('/home/{}/.ensure_dir/ntpd_ensured'.format(user)):
-            sudo('systemctl disable chronyd')
+            conn.sudo('systemctl disable chronyd')
             manage_pkg('-y install', 'remote', 'ntp')
-            sudo('echo "tinker panic 0" >> /etc/ntp.conf')
-            sudo('systemctl start ntpd')
+            conn.sudo('echo "tinker panic 0" >> /etc/ntp.conf')
+            conn.sudo('systemctl start ntpd')
             if os.environ['conf_resource'] != 'ssn' and os.environ['conf_resource'] != 'edge':
-                sudo('echo "server {} prefer iburst" >> /etc/ntp.conf'.format(edge_private_ip))
-                sudo('systemctl restart ntpd')
-            sudo('systemctl enable ntpd')
-            sudo('touch /home/{}/.ensure_dir/ntpd_ensured'.format(user))
+                conn.sudo('echo "server {} prefer iburst" >> /etc/ntp.conf'.format(edge_private_ip))
+                conn.sudo('systemctl restart ntpd')
+            conn.sudo('systemctl enable ntpd')
+            conn.sudo('touch /home/{}/.ensure_dir/ntpd_ensured'.format(user))
     except:
         sys.exit(1)
 
@@ -124,7 +124,7 @@ def ensure_java(user):
     try:
         if not exists('/home/{}/.ensure_dir/java_ensured'.format(user)):
             manage_pkg('-y install', 'remote', 'java-1.8.0-openjdk-devel')
-            sudo('touch /home/{}/.ensure_dir/java_ensured'.format(user))
+            conn.sudo('touch /home/{}/.ensure_dir/java_ensured'.format(user))
     except:
         sys.exit(1)
 
@@ -133,11 +133,11 @@ def ensure_step(user):
     try:
         if not exists('/home/{}/.ensure_dir/step_ensured'.format(user)):
             manage_pkg('-y install', 'remote', 'wget')
-            sudo('wget https://github.com/smallstep/cli/releases/download/v0.13.3/step_0.13.3_linux_amd64.tar.gz '
+            conn.sudo('wget https://github.com/smallstep/cli/releases/download/v0.13.3/step_0.13.3_linux_amd64.tar.gz '
                  '-O /tmp/step_0.13.3_linux_amd64.tar.gz')
-            sudo('tar zxvf /tmp/step_0.13.3_linux_amd64.tar.gz -C /tmp/')
-            sudo('mv /tmp/step_0.13.3/bin/step /usr/bin/')
-            sudo('touch /home/{}/.ensure_dir/step_ensured'.format(user))
+            conn.sudo('tar zxvf /tmp/step_0.13.3_linux_amd64.tar.gz -C /tmp/')
+            conn.sudo('mv /tmp/step_0.13.3/bin/step /usr/bin/')
+            conn.sudo('touch /home/{}/.ensure_dir/step_ensured'.format(user))
     except:
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/edge_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/edge_lib.py
index 948b3fa..fa22cc3 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/edge_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/edge_lib.py
@@ -34,26 +34,26 @@ def configure_http_proxy_server(config):
             manage_pkg('-y install', 'remote', 'squid')
             template_file = config['template_file']
             proxy_subnet = config['exploratory_subnet']
-            put(template_file, '/tmp/squid.conf')
-            sudo('\cp /tmp/squid.conf /etc/squid/squid.conf')
-            sudo('sed -i "s|PROXY_SUBNET|{}|g" /etc/squid/squid.conf'.format(proxy_subnet))
-            sudo('sed -i "s|EDGE_USER_NAME|{}|g" /etc/squid/squid.conf'.format(config['project_name']))
-            sudo('sed -i "s|LDAP_HOST|{}|g" /etc/squid/squid.conf'.format(config['ldap_host']))
-            sudo('sed -i "s|LDAP_DN|{}|g" /etc/squid/squid.conf'.format(config['ldap_dn']))
-            sudo('sed -i "s|LDAP_SERVICE_USERNAME|{}|g" /etc/squid/squid.conf'.format(config['ldap_user']))
-            sudo('sed -i "s|LDAP_SERVICE_PASSWORD|{}|g" /etc/squid/squid.conf'.format(config['ldap_password']))
-            sudo('sed -i "s|LDAP_AUTH_PATH|{}|g" /etc/squid/squid.conf'.format('/usr/lib64/squid/basic_ldap_auth'))
+            conn.put(template_file, '/tmp/squid.conf')
+            conn.sudo('\cp /tmp/squid.conf /etc/squid/squid.conf')
+            conn.sudo('sed -i "s|PROXY_SUBNET|{}|g" /etc/squid/squid.conf'.format(proxy_subnet))
+            conn.sudo('sed -i "s|EDGE_USER_NAME|{}|g" /etc/squid/squid.conf'.format(config['project_name']))
+            conn.sudo('sed -i "s|LDAP_HOST|{}|g" /etc/squid/squid.conf'.format(config['ldap_host']))
+            conn.sudo('sed -i "s|LDAP_DN|{}|g" /etc/squid/squid.conf'.format(config['ldap_dn']))
+            conn.sudo('sed -i "s|LDAP_SERVICE_USERNAME|{}|g" /etc/squid/squid.conf'.format(config['ldap_user']))
+            conn.sudo('sed -i "s|LDAP_SERVICE_PASSWORD|{}|g" /etc/squid/squid.conf'.format(config['ldap_password']))
+            conn.sudo('sed -i "s|LDAP_AUTH_PATH|{}|g" /etc/squid/squid.conf'.format('/usr/lib64/squid/basic_ldap_auth'))
             replace_string = ''
             for cidr in config['vpc_cidrs']:
                 replace_string += 'acl AWS_VPC_CIDR dst {}\\n'.format(cidr)
-            sudo('sed -i "s|VPC_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
+            conn.sudo('sed -i "s|VPC_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
             replace_string = ''
             for cidr in config['allowed_ip_cidr']:
                 replace_string += 'acl AllowedCIDRS src {}\\n'.format(cidr)
-            sudo('sed -i "s|ALLOWED_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
-            sudo('systemctl restart squid')
-            sudo('chkconfig squid on')
-            sudo('touch /tmp/http_proxy_ensured')
+            conn.sudo('sed -i "s|ALLOWED_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
+            conn.sudo('systemctl restart squid')
+            conn.sudo('chkconfig squid on')
+            conn.sudo('touch /tmp/http_proxy_ensured')
     except Exception as err:
         print("Failed to install and configure squid: " + str(err))
         sys.exit(1)
@@ -64,132 +64,132 @@ def install_nginx_lua(edge_ip, nginx_version, keycloak_auth_server_url, keycloak
     try:
         if not os.path.exists('/tmp/nginx_installed'):
             manage_pkg('-y install', 'remote', 'wget')
-            sudo('wget https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm')
+            conn.sudo('wget https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm')
             try:
-                sudo('rpm -ivh epel-release-latest-7.noarch.rpm')
+                conn.sudo('rpm -ivh epel-release-latest-7.noarch.rpm')
             except:
                 print('Looks like EPEL is already installed.')
             manage_pkg('-y install', 'remote', 'gcc gcc-c++ make zlib-devel pcre-devel openssl-devel git openldap-devel')
             if os.environ['conf_stepcerts_enabled'] == 'true':
-                sudo('mkdir -p /home/{0}/keys'.format(user))
-                sudo('''bash -c 'echo "{0}" | base64 --decode > /etc/ssl/certs/root_ca.crt' '''.format(
+                conn.sudo('mkdir -p /home/{0}/keys'.format(user))
+                conn.sudo('''bash -c 'echo "{0}" | base64 --decode > /etc/ssl/certs/root_ca.crt' '''.format(
                      os.environ['conf_stepcerts_root_ca']))
-                fingerprint = sudo('step certificate fingerprint /etc/ssl/certs/root_ca.crt')
-                sudo('step ca bootstrap --fingerprint {0} --ca-url "{1}"'.format(fingerprint,
+                fingerprint = conn.sudo('step certificate fingerprint /etc/ssl/certs/root_ca.crt')
+                conn.sudo('step ca bootstrap --fingerprint {0} --ca-url "{1}"'.format(fingerprint,
                                                                                  os.environ['conf_stepcerts_ca_url']))
-                sudo('echo "{0}" > /home/{1}/keys/provisioner_password'.format(
+                conn.sudo('echo "{0}" > /home/{1}/keys/provisioner_password'.format(
                      os.environ['conf_stepcerts_kid_password'], user))
                 sans = "--san localhost --san 127.0.0.1 {0}".format(step_cert_sans)
                 cn = edge_ip
-                sudo('step ca token {3} --kid {0} --ca-url "{1}" --root /etc/ssl/certs/root_ca.crt '
+                conn.sudo('step ca token {3} --kid {0} --ca-url "{1}" --root /etc/ssl/certs/root_ca.crt '
                      '--password-file /home/{2}/keys/provisioner_password {4} --output-file /tmp/step_token'.format(
                     os.environ['conf_stepcerts_kid'], os.environ['conf_stepcerts_ca_url'], user, cn, sans))
-                token = sudo('cat /tmp/step_token')
-                sudo('step ca certificate "{0}" /etc/ssl/certs/datalab.crt /etc/ssl/certs/datalab.key '
+                token = conn.sudo('cat /tmp/step_token')
+                conn.sudo('step ca certificate "{0}" /etc/ssl/certs/datalab.crt /etc/ssl/certs/datalab.key '
                      '--token "{1}" --kty=RSA --size 2048 --provisioner {2} '.format(cn, token,
                                                                                      os.environ['conf_stepcerts_kid']))
-                sudo('touch /var/log/renew_certificates.log')
-                put('/root/templates/manage_step_certs.sh', '/usr/local/bin/manage_step_certs.sh', use_sudo=True)
-                sudo('chmod +x /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_ROOT_CERT_PATH|/etc/ssl/certs/root_ca.crt|g" '
+                conn.sudo('touch /var/log/renew_certificates.log')
+                conn.put('/root/templates/manage_step_certs.sh', '/usr/local/bin/manage_step_certs.sh', use_sudo=True)
+                conn.sudo('chmod +x /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|STEP_ROOT_CERT_PATH|/etc/ssl/certs/root_ca.crt|g" '
                      '/usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_CERT_PATH|/etc/ssl/certs/datalab.crt|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_KEY_PATH|/etc/ssl/certs/datalab.key|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_CA_URL|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
+                conn.sudo('sed -i "s|STEP_CERT_PATH|/etc/ssl/certs/datalab.crt|g" /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|STEP_KEY_PATH|/etc/ssl/certs/datalab.key|g" /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|STEP_CA_URL|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
                     os.environ['conf_stepcerts_ca_url']))
-                sudo('sed -i "s|RESOURCE_TYPE|edge|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|SANS|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(sans))
-                sudo('sed -i "s|CN|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(cn))
-                sudo('sed -i "s|KID|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
+                conn.sudo('sed -i "s|RESOURCE_TYPE|edge|g" /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|SANS|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(sans))
+                conn.sudo('sed -i "s|CN|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(cn))
+                conn.sudo('sed -i "s|KID|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
                     os.environ['conf_stepcerts_kid']))
-                sudo('sed -i "s|STEP_PROVISIONER_PASSWORD_PATH|/home/{0}/keys/provisioner_password|g" '
+                conn.sudo('sed -i "s|STEP_PROVISIONER_PASSWORD_PATH|/home/{0}/keys/provisioner_password|g" '
                      '/usr/local/bin/manage_step_certs.sh'.format(user))
-                sudo('bash -c \'echo "0 * * * * root /usr/local/bin/manage_step_certs.sh >> '
+                conn.sudo('bash -c \'echo "0 * * * * root /usr/local/bin/manage_step_certs.sh >> '
                      '/var/log/renew_certificates.log 2>&1" >> /etc/crontab \'')
-                put('/root/templates/step-cert-manager.service', '/etc/systemd/system/step-cert-manager.service',
+                conn.put('/root/templates/step-cert-manager.service', '/etc/systemd/system/step-cert-manager.service',
                     use_sudo=True)
-                sudo('systemctl daemon-reload')
-                sudo('systemctl enable step-cert-manager.service')
+                conn.sudo('systemctl daemon-reload')
+                conn.sudo('systemctl enable step-cert-manager.service')
             else:
                 if os.environ['conf_letsencrypt_enabled'] == 'true':
                     print(
                         'Lets Encrypt certificates are not supported for redhat in DataLab. Using self signed certificates')
-                sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key \
+                conn.sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key \
                      -out /etc/ssl/certs/datalab.crt -subj "/C=US/ST=US/L=US/O=datalab/CN={}"'.format(hostname))
-            sudo('mkdir -p /tmp/lua')
-            sudo('mkdir -p /tmp/src')
+            conn.sudo('mkdir -p /tmp/lua')
+            conn.sudo('mkdir -p /tmp/src')
             with cd('/tmp/src/'):
-                sudo('wget http://nginx.org/download/nginx-{}.tar.gz'.format(nginx_version))
-                sudo('tar -xzf nginx-{}.tar.gz'.format(nginx_version))
+                conn.sudo('wget http://nginx.org/download/nginx-{}.tar.gz'.format(nginx_version))
+                conn.sudo('tar -xzf nginx-{}.tar.gz'.format(nginx_version))
 
-                sudo('wget https://github.com/openresty/lua-nginx-module/archive/v0.10.15.tar.gz')
-                sudo('tar -xzf v0.10.15.tar.gz')
+                conn.sudo('wget https://github.com/openresty/lua-nginx-module/archive/v0.10.15.tar.gz')
+                conn.sudo('tar -xzf v0.10.15.tar.gz')
 
-                sudo('wget https://github.com/simplresty/ngx_devel_kit/archive/v0.3.1.tar.gz')
-                sudo('tar -xzf v0.3.1.tar.gz')
+                conn.sudo('wget https://github.com/simplresty/ngx_devel_kit/archive/v0.3.1.tar.gz')
+                conn.sudo('tar -xzf v0.3.1.tar.gz')
 
-                sudo('wget http://luajit.org/download/LuaJIT-2.0.5.tar.gz')
-                sudo('tar -xzf LuaJIT-2.0.5.tar.gz')
+                conn.sudo('wget http://luajit.org/download/LuaJIT-2.0.5.tar.gz')
+                conn.sudo('tar -xzf LuaJIT-2.0.5.tar.gz')
 
-                sudo('wget http://keplerproject.github.io/luarocks/releases/luarocks-2.2.2.tar.gz')
-                sudo('tar -xzf luarocks-2.2.2.tar.gz')
+                conn.sudo('wget http://keplerproject.github.io/luarocks/releases/luarocks-2.2.2.tar.gz')
+                conn.sudo('tar -xzf luarocks-2.2.2.tar.gz')
 
-                sudo('ln -sf nginx-{} nginx'.format(nginx_version))
+                conn.sudo('ln -sf nginx-{} nginx'.format(nginx_version))
 
             with cd('/tmp/src/LuaJIT-2.0.5/'):
-                sudo('make')
-                sudo('make install')
+                conn.sudo('make')
+                conn.sudo('make install')
 
             with cd('/tmp/src/nginx/'), shell_env(LUAJIT_LIB='/usr/local/lib/', LUAJIT_INC='/usr/local/include/luajit-2.0'):
-                sudo('./configure --user=nginx --group=nginx --prefix=/etc/nginx --sbin-path=/usr/sbin/nginx \
+                conn.sudo('./configure --user=nginx --group=nginx --prefix=/etc/nginx --sbin-path=/usr/sbin/nginx \
                                               --conf-path=/etc/nginx/nginx.conf --pid-path=/run/nginx.pid --lock-path=/run/lock/subsys/nginx \
                                               --error-log-path=/var/log/nginx/error.log --http-log-path=/var/log/nginx/access.log \
                                               --with-http_gzip_static_module --with-http_stub_status_module --with-http_ssl_module --with-pcre \
                                               --with-http_realip_module --with-file-aio --with-ipv6 --with-http_v2_module --with-ld-opt="-Wl,-rpath,$LUAJIT_LIB"  \
                                               --without-http_scgi_module --without-http_uwsgi_module --without-http_fastcgi_module --with-http_sub_module \
                                               --add-dynamic-module=/tmp/src/ngx_devel_kit-0.3.1 --add-dynamic-module=/tmp/src/lua-nginx-module-0.10.15')
-                sudo('make')
-                sudo('make install')
+                conn.sudo('make')
+                conn.sudo('make install')
 
             with cd('/tmp/src/luarocks-2.2.2/'):
-                sudo('./configure --with-lua-include=/usr/local/include/luajit-2.0')
-                sudo('make build')
-                sudo('make install')
-                sudo('luarocks install lua-resty-jwt')
-                sudo('luarocks install lua-resty-session')
-                sudo('luarocks install lua-resty-http')
-                sudo('luarocks install lua-resty-openidc')
-                sudo('luarocks install luacrypto')
-                sudo('luarocks install lua-cjson')
-                sudo('luarocks install lua-resty-core')
-                sudo('luarocks install random')
-                sudo('luarocks install lua-resty-string')
-
-            sudo('useradd -r nginx')
-            sudo('rm -f /etc/nginx/nginx.conf')
-            sudo('mkdir -p /opt/datalab/templates')
-            put('/root/templates', '/opt/datalab', use_sudo=True)
-            sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(edge_ip))
-            sudo('sed -i \'s|KEYCLOAK_AUTH_URL|{}|g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
+                conn.sudo('./configure --with-lua-include=/usr/local/include/luajit-2.0')
+                conn.sudo('make build')
+                conn.sudo('make install')
+                conn.sudo('luarocks install lua-resty-jwt')
+                conn.sudo('luarocks install lua-resty-session')
+                conn.sudo('luarocks install lua-resty-http')
+                conn.sudo('luarocks install lua-resty-openidc')
+                conn.sudo('luarocks install luacrypto')
+                conn.sudo('luarocks install lua-cjson')
+                conn.sudo('luarocks install lua-resty-core')
+                conn.sudo('luarocks install random')
+                conn.sudo('luarocks install lua-resty-string')
+
+            conn.sudo('useradd -r nginx')
+            conn.sudo('rm -f /etc/nginx/nginx.conf')
+            conn.sudo('mkdir -p /opt/datalab/templates')
+            conn.put('/root/templates', '/opt/datalab', use_sudo=True)
+            conn.sudo('sed -i \'s/EDGE_IP/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(edge_ip))
+            conn.sudo('sed -i \'s|KEYCLOAK_AUTH_URL|{}|g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_auth_server_url))
-            sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
+            conn.sudo('sed -i \'s/KEYCLOAK_REALM_NAME/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_realm_name))
-            sudo('sed -i \'s/KEYCLOAK_CLIENT_ID/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
+            conn.sudo('sed -i \'s/KEYCLOAK_CLIENT_ID/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_client_id))
-            sudo('sed -i \'s/KEYCLOAK_CLIENT_SECRET/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
+            conn.sudo('sed -i \'s/KEYCLOAK_CLIENT_SECRET/{}/g\' /opt/datalab/templates/conf.d/proxy.conf'.format(
                 keycloak_client_secret))
 
-            sudo('cp /opt/datalab/templates/nginx.conf /etc/nginx/')
-            sudo('mkdir /etc/nginx/conf.d')
-            sudo('cp /opt/datalab/templates/conf.d/proxy.conf /etc/nginx/conf.d/')
-            sudo('mkdir /etc/nginx/locations')
-            sudo('cp /opt/datalab/templates/nginx_redhat /etc/init.d/nginx')
-            sudo('chmod +x /etc/init.d/nginx')
-            sudo('chkconfig --add nginx')
-            sudo('chkconfig --level 345 nginx on')
-            sudo('setsebool -P httpd_can_network_connect 1')
-            sudo('service nginx start')
-            sudo('touch /tmp/nginx_installed')
+            conn.sudo('cp /opt/datalab/templates/nginx.conf /etc/nginx/')
+            conn.sudo('mkdir /etc/nginx/conf.d')
+            conn.sudo('cp /opt/datalab/templates/conf.d/proxy.conf /etc/nginx/conf.d/')
+            conn.sudo('mkdir /etc/nginx/locations')
+            conn.sudo('cp /opt/datalab/templates/nginx_redhat /etc/init.d/nginx')
+            conn.sudo('chmod +x /etc/init.d/nginx')
+            conn.sudo('chkconfig --add nginx')
+            conn.sudo('chkconfig --level 345 nginx on')
+            conn.sudo('setsebool -P httpd_can_network_connect 1')
+            conn.sudo('service nginx start')
+            conn.sudo('touch /tmp/nginx_installed')
     except Exception as err:
         print("Failed install nginx with ldap: " + str(err))
         sys.exit(1)
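
    The conn.put() calls in this file upload templates to /tmp and then move them into root-owned
    locations with conn.sudo(), as with squid.conf above. Assuming Fabric 2.x, where put() itself
    has no sudo option, that staging step can be wrapped like this (helper name and example paths
    are illustrative only):

        import os.path

        def put_as_root(conn, local_path, remote_path):
            # upload as the SSH user, then promote into the privileged destination
            staged = '/tmp/{}'.format(os.path.basename(local_path))
            conn.put(local_path, staged)
            conn.sudo('mv {} {}'.format(staged, remote_path))

        # put_as_root(conn, '/root/templates/squid.conf', '/etc/squid/squid.conf')
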
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
index 3963ea6..e017af4 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
@@ -34,13 +34,13 @@ from fabric.contrib.files import exists
 def enable_proxy(proxy_host, proxy_port):
     try:
         proxy_string = "http://%s:%s" % (proxy_host, proxy_port)
-        sudo('sed -i "/^export http_proxy/d" /etc/profile')
-        sudo('sed -i "/^export https_proxy/d" /etc/profile')
-        sudo('echo export http_proxy=' + proxy_string + ' >> /etc/profile')
-        sudo('echo export https_proxy=' + proxy_string + ' >> /etc/profile')
+        conn.sudo('sed -i "/^export http_proxy/d" /etc/profile')
+        conn.sudo('sed -i "/^export https_proxy/d" /etc/profile')
+        conn.sudo('echo export http_proxy=' + proxy_string + ' >> /etc/profile')
+        conn.sudo('echo export https_proxy=' + proxy_string + ' >> /etc/profile')
         if exists('/etc/yum.conf'):
-            sudo('sed -i "/^proxy=/d" /etc/yum.conf')
-        sudo("echo 'proxy={}' >> /etc/yum.conf".format(proxy_string))
+            conn.sudo('sed -i "/^proxy=/d" /etc/yum.conf')
+        conn.sudo("echo 'proxy={}' >> /etc/yum.conf".format(proxy_string))
         manage_pkg('clean all', 'remote', '')
     except:
         sys.exit(1)
@@ -48,7 +48,7 @@ def enable_proxy(proxy_host, proxy_port):
 
 def downgrade_python_version():
     try:
-       sudo('python3 -c "import os,sys,yum; yb = yum.YumBase(); pl = yb.doPackageLists(); \
+       conn.sudo('python3 -c "import os,sys,yum; yb = yum.YumBase(); pl = yb.doPackageLists(); \
         version = [pkg.vr for pkg in pl.installed if pkg.name == \'python\']; \
         os.system(\'yum -y downgrade python python-devel-2.7.5-58.el7.x86_64 python-libs-2.7.5-58.el7.x86_64\') \
         if version != [] and version[0] == \'2.7.5-68.el7\' else False"')
@@ -59,22 +59,22 @@ def downgrade_python_version():
 def ensure_r_local_kernel(spark_version, os_user, templates_dir, kernels_dir):
     if not exists('/home/{}/.ensure_dir/r_kernel_ensured'.format(os_user)):
         try:
-            sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/.local')
-            run('R -e "IRkernel::installspec()"')
-            sudo('ln -s /opt/spark/ /usr/local/spark')
+            conn.sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/.local')
+            conn.run('R -e "IRkernel::installspec()"')
+            conn.sudo('ln -s /opt/spark/ /usr/local/spark')
             try:
-                sudo('cd /usr/local/spark/R/lib/SparkR; R -e "install.packages(\'roxygen2\',repos=\'https://cloud.r-project.org\')" R -e "devtools::check(\'.\')"')
+                conn.sudo('cd /usr/local/spark/R/lib/SparkR; R -e "install.packages(\'roxygen2\',repos=\'https://cloud.r-project.org\')" R -e "devtools::check(\'.\')"')
             except:
                 pass
-            sudo('cd /usr/local/spark/R/lib/SparkR; R -e "devtools::install(\'.\')"')
-            r_version = sudo("R --version | awk '/version / {print $3}'")
-            put(templates_dir + 'r_template.json', '/tmp/r_template.json')
-            sudo('sed -i "s|R_VER|' + r_version + '|g" /tmp/r_template.json')
-            sudo('sed -i "s|SP_VER|' + spark_version + '|g" /tmp/r_template.json')
-            sudo('\cp -f /tmp/r_template.json {}/ir/kernel.json'.format(kernels_dir))
-            sudo('ln -s /usr/lib64/R/ /usr/lib/R')
-            sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/.local')
-            sudo('touch /home/{}/.ensure_dir/r_kernel_ensured'.format(os_user))
+            conn.sudo('cd /usr/local/spark/R/lib/SparkR; R -e "devtools::install(\'.\')"')
+            r_version = conn.sudo("R --version | awk '/version / {print $3}'")
+            conn.put(templates_dir + 'r_template.json', '/tmp/r_template.json')
+            conn.sudo('sed -i "s|R_VER|' + r_version + '|g" /tmp/r_template.json')
+            conn.sudo('sed -i "s|SP_VER|' + spark_version + '|g" /tmp/r_template.json')
+            conn.sudo('\cp -f /tmp/r_template.json {}/ir/kernel.json'.format(kernels_dir))
+            conn.sudo('ln -s /usr/lib64/R/ /usr/lib/R')
+            conn.sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/.local')
+            conn.sudo('touch /home/{}/.ensure_dir/r_kernel_ensured'.format(os_user))
         except:
             sys.exit(1)
 
@@ -88,17 +88,17 @@ def ensure_r(os_user, r_libs, region, r_mirror):
                 r_repository = 'https://cloud.r-project.org'
             manage_pkg('-y install', 'remote', 'cmake')
             manage_pkg('-y install', 'remote', 'libcur*')
-            sudo('echo -e "[base]\nname=CentOS-7-Base\nbaseurl=http://buildlogs.centos.org/centos/7/os/x86_64-20140704-1/\ngpgcheck=1\ngpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7\npriority=1\nexclude=php mysql" >> /etc/yum.repos.d/CentOS-base.repo')
+            conn.sudo('echo -e "[base]\nname=CentOS-7-Base\nbaseurl=http://buildlogs.centos.org/centos/7/os/x86_64-20140704-1/\ngpgcheck=1\ngpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7\npriority=1\nexclude=php mysql" >> /etc/yum.repos.d/CentOS-base.repo')
             manage_pkg('-y install', 'remote', 'R R-core R-core-devel R-devel --nogpgcheck')
-            sudo('R CMD javareconf')
-            sudo('cd /root; git clone https://github.com/zeromq/zeromq4-x.git; cd zeromq4-x/; mkdir build; cd build; cmake ..; make install; ldconfig')
+            conn.sudo('R CMD javareconf')
+            conn.sudo('cd /root; git clone https://github.com/zeromq/zeromq4-x.git; cd zeromq4-x/; mkdir build; cd build; cmake ..; make install; ldconfig')
             for i in r_libs:
-                sudo('R -e "install.packages(\'{}\',repos=\'{}\')"'.format(i, r_repository))
-            sudo('R -e "library(\'devtools\');install.packages(repos=\'{}\',c(\'rzmq\',\'repr\',\'digest\',\'stringr\',\'RJSONIO\',\'functional\',\'plyr\'))"'.format(r_repository))
-            sudo('R -e "library(\'devtools\');install_github(\'IRkernel/repr\');install_github(\'IRkernel/IRdisplay\');install_github(\'IRkernel/IRkernel\');"')
-            sudo('R -e "library(\'devtools\');install_version(\'keras\', version = \'{}\', repos = \'{}\');"'.format(os.environ['notebook_keras_version'],r_repository))
-            sudo('R -e "install.packages(\'RJDBC\',repos=\'{}\',dep=TRUE)"'.format(r_repository))
-            sudo('touch /home/{}/.ensure_dir/r_ensured'.format(os_user))
+                conn.sudo('R -e "install.packages(\'{}\',repos=\'{}\')"'.format(i, r_repository))
+            conn.sudo('R -e "library(\'devtools\');install.packages(repos=\'{}\',c(\'rzmq\',\'repr\',\'digest\',\'stringr\',\'RJSONIO\',\'functional\',\'plyr\'))"'.format(r_repository))
+            conn.sudo('R -e "library(\'devtools\');install_github(\'IRkernel/repr\');install_github(\'IRkernel/IRdisplay\');install_github(\'IRkernel/IRkernel\');"')
+            conn.sudo('R -e "library(\'devtools\');install_version(\'keras\', version = \'{}\', repos = \'{}\');"'.format(os.environ['notebook_keras_version'],r_repository))
+            conn.sudo('R -e "install.packages(\'RJDBC\',repos=\'{}\',dep=TRUE)"'.format(r_repository))
+            conn.sudo('touch /home/{}/.ensure_dir/r_ensured'.format(os_user))
         except:
             sys.exit(1)
 
@@ -107,34 +107,34 @@ def install_rstudio(os_user, local_spark_path, rstudio_pass, rstudio_version):
     if not exists('/home/' + os_user + '/.ensure_dir/rstudio_ensured'):
         try:
             manage_pkg('-y install --nogpgcheck', 'remote', 'https://download2.rstudio.org/server/centos6/x86_64/rstudio-server-rhel-{}-x86_64.rpm'.format(rstudio_version))
-            sudo('mkdir -p /mnt/var')
-            sudo('chown {0}:{0} /mnt/var'.format(os_user))
-            sudo("sed -i '/Type=forking/a \Environment=USER=datalab-user' /lib/systemd/system/rstudio-server.service")
-            sudo(
+            conn.sudo('mkdir -p /mnt/var')
+            conn.sudo('chown {0}:{0} /mnt/var'.format(os_user))
+            conn.sudo("sed -i '/Type=forking/a \Environment=USER=datalab-user' /lib/systemd/system/rstudio-server.service")
+            conn.sudo(
                 "sed -i '/ExecStart/s|=/usr/lib/rstudio-server/bin/rserver|=/bin/bash -c \"export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; /usr/lib/rstudio-server/bin/rserver --auth-none 1|g' /lib/systemd/system/rstudio-server.service")
-            sudo("sed -i '/ExecStart/s|$|\"|g' /lib/systemd/system/rstudio-server.service")
-            sudo("systemctl daemon-reload")
-            sudo('touch /home/{}/.Renviron'.format(os_user))
-            sudo('chown {0}:{0} /home/{0}/.Renviron'.format(os_user))
-            sudo('''echo 'SPARK_HOME="{0}"' >> /home/{1}/.Renviron'''.format(local_spark_path, os_user))
-            sudo('touch /home/{}/.Rprofile'.format(os_user))
-            sudo('chown {0}:{0} /home/{0}/.Rprofile'.format(os_user))
-            sudo('''echo 'library(SparkR, lib.loc = c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib")))' >> /home/{}/.Rprofile'''.format(os_user))
-            http_proxy = run('echo $http_proxy')
-            https_proxy = run('echo $https_proxy')
-            sudo('''echo 'Sys.setenv(http_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(http_proxy, os_user))
-            sudo('''echo 'Sys.setenv(https_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(https_proxy, os_user))
-            sudo('rstudio-server start')
-            sudo('echo "{0}:{1}" | chpasswd'.format(os_user, rstudio_pass))
-            sudo("sed -i '/exit 0/d' /etc/rc.local")
-            sudo('''bash -c "echo \'sed -i 's/^#SPARK_HOME/SPARK_HOME/' /home/{}/.Renviron\' >> /etc/rc.local"'''.format(os_user))
-            sudo("bash -c 'echo exit 0 >> /etc/rc.local'")
-            sudo('touch /home/{}/.ensure_dir/rstudio_ensured'.format(os_user))
+            conn.sudo("sed -i '/ExecStart/s|$|\"|g' /lib/systemd/system/rstudio-server.service")
+            conn.sudo("systemctl daemon-reload")
+            conn.sudo('touch /home/{}/.Renviron'.format(os_user))
+            conn.sudo('chown {0}:{0} /home/{0}/.Renviron'.format(os_user))
+            conn.sudo('''echo 'SPARK_HOME="{0}"' >> /home/{1}/.Renviron'''.format(local_spark_path, os_user))
+            conn.sudo('touch /home/{}/.Rprofile'.format(os_user))
+            conn.sudo('chown {0}:{0} /home/{0}/.Rprofile'.format(os_user))
+            conn.sudo('''echo 'library(SparkR, lib.loc = c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib")))' >> /home/{}/.Rprofile'''.format(os_user))
+            http_proxy = conn.run('echo $http_proxy')
+            https_proxy = conn.run('echo $https_proxy')
+            conn.sudo('''echo 'Sys.setenv(http_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(http_proxy, os_user))
+            conn.sudo('''echo 'Sys.setenv(https_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(https_proxy, os_user))
+            conn.sudo('rstudio-server start')
+            conn.sudo('echo "{0}:{1}" | chpasswd'.format(os_user, rstudio_pass))
+            conn.sudo("sed -i '/exit 0/d' /etc/rc.local")
+            conn.sudo('''bash -c "echo \'sed -i 's/^#SPARK_HOME/SPARK_HOME/' /home/{}/.Renviron\' >> /etc/rc.local"'''.format(os_user))
+            conn.sudo("bash -c 'echo exit 0 >> /etc/rc.local'")
+            conn.sudo('touch /home/{}/.ensure_dir/rstudio_ensured'.format(os_user))
         except:
             sys.exit(1)
     else:
         try:
-            sudo('echo "{0}:{1}" | chpasswd'.format(os_user, rstudio_pass))
+            conn.sudo('echo "{0}:{1}" | chpasswd'.format(os_user, rstudio_pass))
         except:
             sys.exit(1)
 
@@ -142,10 +142,10 @@ def install_rstudio(os_user, local_spark_path, rstudio_pass, rstudio_version):
 def ensure_matplot(os_user):
     if not exists('/home/{}/.ensure_dir/matplot_ensured'.format(os_user)):
         try:
-            sudo('python3.5 -m pip install matplotlib==2.0.2 --no-cache-dir')
+            conn.sudo('python3.5 -m pip install matplotlib==2.0.2 --no-cache-dir')
             if os.environ['application'] in ('tensor', 'deeplearning'):
-                sudo('python3.8 -m pip install -U numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
-            sudo('touch /home/{}/.ensure_dir/matplot_ensured'.format(os_user))
+                conn.sudo('python3.8 -m pip install -U numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+            conn.sudo('touch /home/{}/.ensure_dir/matplot_ensured'.format(os_user))
         except:
             sys.exit(1)
 
@@ -153,9 +153,9 @@ def ensure_matplot(os_user):
 def ensure_sbt(os_user):
     if not exists('/home/{}/.ensure_dir/sbt_ensured'.format(os_user)):
         try:
-            sudo('curl https://bintray.com/sbt/rpm/rpm | sudo tee /etc/yum.repos.d/bintray-sbt-rpm.repo')
+            conn.sudo('curl https://bintray.com/sbt/rpm/rpm | sudo tee /etc/yum.repos.d/bintray-sbt-rpm.repo')
             manage_pkg('-y install', 'remote', 'sbt')
-            sudo('touch /home/{}/.ensure_dir/sbt_ensured'.format(os_user))
+            conn.sudo('touch /home/{}/.ensure_dir/sbt_ensured'.format(os_user))
         except:
             sys.exit(1)
 
@@ -165,7 +165,7 @@ def ensure_jre_jdk(os_user):
         try:
             manage_pkg('-y install', 'remote', 'java-1.8.0-openjdk')
             manage_pkg('-y install', 'remote', 'java-1.8.0-openjdk-devel')
-            sudo('touch /home/' + os_user + '/.ensure_dir/jre_jdk_ensured')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/jre_jdk_ensured')
         except:
             sys.exit(1)
 
@@ -173,9 +173,9 @@ def ensure_jre_jdk(os_user):
 def ensure_scala(scala_link, scala_version, os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/scala_ensured'):
         try:
-            sudo('wget {}scala-{}.rpm -O /tmp/scala.rpm'.format(scala_link, scala_version))
-            sudo('rpm -i /tmp/scala.rpm')
-            sudo('touch /home/' + os_user + '/.ensure_dir/scala_ensured')
+            conn.sudo('wget {}scala-{}.rpm -O /tmp/scala.rpm'.format(scala_link, scala_version))
+            conn.sudo('rpm -i /tmp/scala.rpm')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/scala_ensured')
         except:
             sys.exit(1)
 
@@ -186,10 +186,10 @@ def ensure_additional_python_libs(os_user):
             manage_pkg('clean', 'remote', 'all')
             manage_pkg('-y install', 'remote', 'zlib-devel libjpeg-turbo-devel --nogpgcheck')
             if os.environ['application'] in ('jupyter', 'zeppelin'):
-                sudo('python3.5 -m pip install NumPy=={} SciPy pandas Sympy Pillow sklearn --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+                conn.sudo('python3.5 -m pip install NumPy=={} SciPy pandas Sympy Pillow sklearn --no-cache-dir'.format(os.environ['notebook_numpy_version']))
             if os.environ['application'] in ('tensor', 'deeplearning'):
-                sudo('python3.8 -m pip install opencv-python h5py --no-cache-dir')
-            sudo('touch /home/' + os_user + '/.ensure_dir/additional_python_libs_ensured')
+                conn.sudo('python3.8 -m pip install opencv-python h5py --no-cache-dir')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/additional_python_libs_ensured')
         except:
             sys.exit(1)
 
@@ -201,9 +201,9 @@ def ensure_python3_specific_version(python3_version, os_user):
             manage_pkg('-y groupinstall', 'remote', 'development --nogpgcheck')
             if len(python3_version) < 4:
                 python3_version = python3_version + ".0"
-            sudo('wget https://www.python.org/ftp/python/{0}/Python-{0}.tgz'.format(python3_version))
-            sudo('tar xzf Python-{0}.tgz; cd Python-{0}; ./configure --prefix=/usr/local; make altinstall'.format(python3_version))
-            sudo('touch /home/' + os_user + '/.ensure_dir/python3_specific_version_ensured')
+            conn.sudo('wget https://www.python.org/ftp/python/{0}/Python-{0}.tgz'.format(python3_version))
+            conn.sudo('tar xzf Python-{0}.tgz; cd Python-{0}; ./configure --prefix=/usr/local; make altinstall'.format(python3_version))
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/python3_specific_version_ensured')
         except:
             sys.exit(1)
 
@@ -212,16 +212,16 @@ def ensure_python3_libraries(os_user):
         try:
             manage_pkg('-y install', 'remote', 'https://centos7.iuscommunity.org/ius-release.rpm')
             manage_pkg('-y install', 'remote', 'python35u python35u-pip python35u-devel')
-            sudo('python3.5 -m pip install -U pip=={} setuptools --no-cache-dir'.format(os.environ['conf_pip_version']))
-            sudo('python3.5 -m pip install boto3 --no-cache-dir')
-            sudo('python3.5 -m pip install fabvenv fabric-virtualenv future --no-cache-dir')
+            conn.sudo('python3.5 -m pip install -U pip=={} setuptools --no-cache-dir'.format(os.environ['conf_pip_version']))
+            conn.sudo('python3.5 -m pip install boto3 --no-cache-dir')
+            conn.sudo('python3.5 -m pip install fabvenv fabric-virtualenv future --no-cache-dir')
             try:
-                sudo('python3.5 -m pip install tornado=={0} ipython==7.9.0 ipykernel=={1} --no-cache-dir' \
+                conn.sudo('python3.5 -m pip install tornado=={0} ipython==7.9.0 ipykernel=={1} --no-cache-dir' \
                      .format(os.environ['notebook_tornado_version'], os.environ['notebook_ipykernel_version']))
             except:
-                sudo('python3.5 -m pip install tornado=={0} ipython==5.0.0 ipykernel=={1} --no-cache-dir' \
+                conn.sudo('python3.5 -m pip install tornado=={0} ipython==5.0.0 ipykernel=={1} --no-cache-dir' \
                      .format(os.environ['notebook_tornado_version'], os.environ['notebook_ipykernel_version']))
-            sudo('touch /home/' + os_user + '/.ensure_dir/python3_libraries_ensured')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/python3_libraries_ensured')
         except:
             sys.exit(1)
 
@@ -232,61 +232,61 @@ def install_tensor(os_user, cuda_version, cuda_file_name,
     if not exists('/home/{}/.ensure_dir/tensor_ensured'.format(os_user)):
         try:
             # install nvidia drivers
-            sudo('echo "blacklist nouveau" >> /etc/modprobe.d/blacklist-nouveau.conf')
-            sudo('echo "options nouveau modeset=0" >> /etc/modprobe.d/blacklist-nouveau.conf')
-            sudo('dracut --force')
+            conn.sudo('echo "blacklist nouveau" >> /etc/modprobe.d/blacklist-nouveau.conf')
+            conn.sudo('echo "options nouveau modeset=0" >> /etc/modprobe.d/blacklist-nouveau.conf')
+            conn.sudo('dracut --force')
             with settings(warn_only=True):
                 reboot(wait=150)
             manage_pkg('-y install', 'remote', 'libglvnd-opengl libglvnd-devel dkms gcc kernel-devel-$(uname -r) kernel-headers-$(uname -r)')
-            sudo('wget http://us.download.nvidia.com/XFree86/Linux-x86_64/{0}/NVIDIA-Linux-x86_64-{0}.run -O /home/{1}/NVIDIA-Linux-x86_64-{0}.run'.format(nvidia_version, os_user))
-            sudo('/bin/bash /home/{0}/NVIDIA-Linux-x86_64-{1}.run -s --dkms'.format(os_user, nvidia_version))
-            sudo('rm -f /home/{0}/NVIDIA-Linux-x86_64-{1}.run'.format(os_user, nvidia_version))
+            conn.sudo('wget http://us.download.nvidia.com/XFree86/Linux-x86_64/{0}/NVIDIA-Linux-x86_64-{0}.run -O /home/{1}/NVIDIA-Linux-x86_64-{0}.run'.format(nvidia_version, os_user))
+            conn.sudo('/bin/bash /home/{0}/NVIDIA-Linux-x86_64-{1}.run -s --dkms'.format(os_user, nvidia_version))
+            conn.sudo('rm -f /home/{0}/NVIDIA-Linux-x86_64-{1}.run'.format(os_user, nvidia_version))
             # install cuda
-            sudo('python3.5 -m pip install --upgrade pip=={0} wheel numpy=={1} --no-cache-dir'. format(os.environ['conf_pip_version'], os.environ['notebook_numpy_version']))
-            sudo('wget -P /opt https://developer.nvidia.com/compute/cuda/{0}/prod/local_installers/{1}'.format(cuda_version, cuda_file_name))
-            sudo('sh /opt/{} --silent --toolkit'.format(cuda_file_name))
-            sudo('mv /usr/local/cuda-{} /opt/'.format(cuda_version))
-            sudo('ln -s /opt/cuda-{0} /usr/local/cuda-{0}'.format(cuda_version))
-            sudo('rm -f /opt/{}'.format(cuda_file_name))
+            conn.sudo('python3.5 -m pip install --upgrade pip=={0} wheel numpy=={1} --no-cache-dir'. format(os.environ['conf_pip_version'], os.environ['notebook_numpy_version']))
+            conn.sudo('wget -P /opt https://developer.nvidia.com/compute/cuda/{0}/prod/local_installers/{1}'.format(cuda_version, cuda_file_name))
+            conn.sudo('sh /opt/{} --silent --toolkit'.format(cuda_file_name))
+            conn.sudo('mv /usr/local/cuda-{} /opt/'.format(cuda_version))
+            conn.sudo('ln -s /opt/cuda-{0} /usr/local/cuda-{0}'.format(cuda_version))
+            conn.sudo('rm -f /opt/{}'.format(cuda_file_name))
             # install cuDNN
             run('wget http://developer.download.nvidia.com/compute/redist/cudnn/v{0}/{1} -O /tmp/{1}'.format(cudnn_version, cudnn_file_name))
             run('tar xvzf /tmp/{} -C /tmp'.format(cudnn_file_name))
-            sudo('mkdir -p /opt/cudnn/include')
-            sudo('mkdir -p /opt/cudnn/lib64')
-            sudo('mv /tmp/cuda/include/cudnn.h /opt/cudnn/include')
-            sudo('mv /tmp/cuda/lib64/libcudnn* /opt/cudnn/lib64')
-            sudo('chmod a+r /opt/cudnn/include/cudnn.h /opt/cudnn/lib64/libcudnn*')
+            conn.sudo('mkdir -p /opt/cudnn/include')
+            conn.sudo('mkdir -p /opt/cudnn/lib64')
+            conn.sudo('mv /tmp/cuda/include/cudnn.h /opt/cudnn/include')
+            conn.sudo('mv /tmp/cuda/lib64/libcudnn* /opt/cudnn/lib64')
+            conn.sudo('chmod a+r /opt/cudnn/include/cudnn.h /opt/cudnn/lib64/libcudnn*')
             run('echo "export LD_LIBRARY_PATH=\"$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64\"" >> ~/.bashrc')
             # install TensorFlow and run TensorBoard
-            sudo('wget https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-{}-cp27-none-linux_x86_64.whl'.format(tensorflow_version))
-            sudo('wget https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-{}-cp35-cp35m-linux_x86_64.whl'.format(tensorflow_version))
-            sudo('python3.8 -m pip install --upgrade tensorflow_gpu-{}-cp35-cp35m-linux_x86_64.whl --no-cache-dir'.format(tensorflow_version))
-            sudo('rm -rf /home/{}/tensorflow_gpu-*'.format(os_user))
-            sudo('mkdir /var/log/tensorboard; chown {0}:{0} -R /var/log/tensorboard'.format(os_user))
-            put('{}tensorboard.service'.format(templates_dir), '/tmp/tensorboard.service')
-            sudo("sed -i 's|OS_USR|{}|' /tmp/tensorboard.service".format(os_user))
-            sudo("chmod 644 /tmp/tensorboard.service")
-            sudo('\cp /tmp/tensorboard.service /etc/systemd/system/')
-            sudo("systemctl daemon-reload")
-            sudo("systemctl enable tensorboard")
-            sudo("systemctl start tensorboard")
-            sudo('touch /home/{}/.ensure_dir/tensor_ensured'.format(os_user))
+            conn.sudo('wget https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-{}-cp27-none-linux_x86_64.whl'.format(tensorflow_version))
+            conn.sudo('wget https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-{}-cp35-cp35m-linux_x86_64.whl'.format(tensorflow_version))
+            conn.sudo('python3.8 -m pip install --upgrade tensorflow_gpu-{}-cp35-cp35m-linux_x86_64.whl --no-cache-dir'.format(tensorflow_version))
+            conn.sudo('rm -rf /home/{}/tensorflow_gpu-*'.format(os_user))
+            conn.sudo('mkdir /var/log/tensorboard; chown {0}:{0} -R /var/log/tensorboard'.format(os_user))
+            conn.put('{}tensorboard.service'.format(templates_dir), '/tmp/tensorboard.service')
+            conn.sudo("sed -i 's|OS_USR|{}|' /tmp/tensorboard.service".format(os_user))
+            conn.sudo("chmod 644 /tmp/tensorboard.service")
+            conn.sudo('\cp /tmp/tensorboard.service /etc/systemd/system/')
+            conn.sudo("systemctl daemon-reload")
+            conn.sudo("systemctl enable tensorboard")
+            conn.sudo("systemctl start tensorboard")
+            conn.sudo('touch /home/{}/.ensure_dir/tensor_ensured'.format(os_user))
         except:
             sys.exit(1)
 
 
 def install_maven(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/maven_ensured'):
-        sudo('wget http://apache.volia.net/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz -O /tmp/maven.tar.gz')
-        sudo('tar -zxvf /tmp/maven.tar.gz -C /opt/')
-        sudo('ln -fs /opt/apache-maven-3.3.9/bin/mvn /usr/bin/mvn')
-        sudo('touch /home/' + os_user + '/.ensure_dir/maven_ensured')
+        conn.sudo('wget http://apache.volia.net/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz -O /tmp/maven.tar.gz')
+        conn.sudo('tar -zxvf /tmp/maven.tar.gz -C /opt/')
+        conn.sudo('ln -fs /opt/apache-maven-3.3.9/bin/mvn /usr/bin/mvn')
+        conn.sudo('touch /home/' + os_user + '/.ensure_dir/maven_ensured')
 
 
 def install_livy_dependencies(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/livy_dependencies_ensured'):
-        sudo('pip3.5 install cloudpickle requests requests-kerberos flake8 flaky pytest --no-cache-dir')
-        sudo('touch /home/' + os_user + '/.ensure_dir/livy_dependencies_ensured')
+        conn.sudo('pip3.5 install cloudpickle requests requests-kerberos flake8 flaky pytest --no-cache-dir')
+        conn.sudo('touch /home/' + os_user + '/.ensure_dir/livy_dependencies_ensured')
 
 
 def install_maven_emr(os_user):
@@ -305,9 +305,9 @@ def install_livy_dependencies_emr(os_user):
 
 def install_nodejs(os_user):
     if not exists('/home/{}/.ensure_dir/nodejs_ensured'.format(os_user)):
-        sudo('curl -sL https://rpm.nodesource.com/setup_6.x | sudo -E bash -')
+        conn.sudo('curl -sL https://rpm.nodesource.com/setup_6.x | sudo -E bash -')
         manage_pkg('-y install', 'remote', 'nodejs')
-        sudo('touch /home/{}/.ensure_dir/nodejs_ensured'.format(os_user))
+        conn.sudo('touch /home/{}/.ensure_dir/nodejs_ensured'.format(os_user))
 
 
 def install_os_pkg(requisites):
@@ -317,7 +317,7 @@ def install_os_pkg(requisites):
     try:
         print("Updating repositories and installing requested tools: {}".format(requisites))
         manage_pkg('update-minimal --security -y --skip-broken', 'remote', '')
-        sudo('export LC_ALL=C')
+        conn.sudo('export LC_ALL=C')
         for os_pkg in requisites:
             name, vers = os_pkg
             if vers != '' and vers !='N/A':
@@ -328,22 +328,22 @@ def install_os_pkg(requisites):
                 os_pkg = name
             manage_pkg('-y install', 'remote', '{0} --nogpgcheck 2>&1 | tee /tmp/tee.tmp; if ! grep -w -E  "({1})" '
                                                '/tmp/tee.tmp >  /tmp/os_install_{2}.log; then  echo "" > /tmp/os_install_{2}.log;fi'.format(os_pkg, error_parser, name))
-            install_output = sudo('cat /tmp/tee.tmp')
-            err = sudo('cat /tmp/os_install_{}.log'.format(name)).replace('"', "'")
-            sudo('cat /tmp/tee.tmp | if ! grep -w -E -A 30 "({1})" /tmp/tee.tmp > '
+            install_output = conn.sudo('cat /tmp/tee.tmp')
+            err = conn.sudo('cat /tmp/os_install_{}.log'.format(name)).replace('"', "'")
+            conn.sudo('cat /tmp/tee.tmp | if ! grep -w -E -A 30 "({1})" /tmp/tee.tmp > '
                  '/tmp/os_install_{0}.log; then echo "" > /tmp/os_install_{0}.log;fi'.format(name, new_pkgs_parser))
-            dep = sudo('cat /tmp/os_install_{}.log'.format(name))
+            dep = conn.sudo('cat /tmp/os_install_{}.log'.format(name))
             if dep == '':
                 dep = []
             else:
                 dep = dep[len(new_pkgs_parser): dep.find("Complete!") - 1].replace('  ', '').strip().split('\r\n')
                 for n, i in enumerate(dep):
                     i = i.split('.')[0]
-                    sudo('yum info {0} 2>&1 | if ! grep Version > /tmp/os_install_{0}.log; then echo "" > /tmp/os_install_{0}.log;fi'.format(i))
+                    conn.sudo('yum info {0} 2>&1 | if ! grep Version > /tmp/os_install_{0}.log; then echo "" > /tmp/os_install_{0}.log;fi'.format(i))
                     dep[n] =sudo('cat /tmp/os_install_{}.log'.format(i)).replace('Version     : ', '{} v.'.format(i))
                 dep = [i for i in dep if i]
             versions = []
-            res = sudo(
+            res = conn.sudo(
                 'python3 -c "import os,sys,yum; yb = yum.YumBase(); pl = yb.doPackageLists(); print [pkg.vr for pkg in pl.installed if pkg.name == \'{0}\']"'.format(
                     name)).split('\r\n')[1]
             if err:
@@ -352,7 +352,7 @@ def install_os_pkg(requisites):
                 version = res.split("'")[1].split("-")[0]
                 status_msg = "installed"
             if 'No package {} available'.format(os_pkg) in install_output:
-                versions = sudo('yum --showduplicates list ' + name + ' | expand | grep ' + name + ' | awk \'{print $2}\'').replace('\r\n', '')
+                versions = conn.sudo('yum --showduplicates list ' + name + ' | expand | grep ' + name + ' | awk \'{print $2}\'').replace('\r\n', '')
                 if versions and versions != 'Error: No matching Packages to list':
                     versions = versions.split(' ')
                     status_msg = 'invalid_version'
@@ -386,7 +386,7 @@ def get_available_os_pkgs():
     try:
         manage_pkg('update-minimal --security -y --skip-broken', 'remote', '')
         downgrade_python_version()
-        yum_raw = sudo('python3 -c "import os,sys,yum; yb = yum.YumBase(); pl = yb.doPackageLists(); print {pkg.name:pkg.vr for pkg in pl.available}"')
+        yum_raw = conn.sudo('python3 -c "import os,sys,yum; yb = yum.YumBase(); pl = yb.doPackageLists(); print {pkg.name:pkg.vr for pkg in pl.available}"')
         yum_re = re.sub\
             (r'\w*\s\w*\D\s\w*.\w*.\s\w*.\w*.\w.\w*.\w*.\w*', '', yum_raw)
         yum_list = yum_re.replace("'", "\"")
@@ -400,8 +400,8 @@ def get_available_os_pkgs():
 def install_opencv(os_user):
     if not exists('/home/{}/.ensure_dir/opencv_ensured'.format(os_user)):
         manage_pkg('-y install', 'remote', 'cmake python34 python34-devel python34-pip gcc gcc-c++')
-        sudo('pip3.4 install numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
-        sudo('pip3.5 install numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+        conn.sudo('pip3.4 install numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
+        conn.sudo('pip3.5 install numpy=={} --no-cache-dir'.format(os.environ['notebook_numpy_version']))
         run('git clone https://github.com/opencv/opencv.git')
         with cd('/home/{}/opencv/'.format(os_user)):
             run('git checkout 3.2.0')
@@ -409,8 +409,8 @@ def install_opencv(os_user):
         with cd('/home/{}/opencv/release/'.format(os_user)):
             run('cmake -DINSTALL_TESTS=OFF -D CUDA_GENERATION=Auto -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=$(python2 -c "import sys; print(sys.prefix)") -D PYTHON_EXECUTABLE=$(which python2) ..')
             run('make -j$(nproc)')
-            sudo('make install')
-        sudo('touch /home/' + os_user + '/.ensure_dir/opencv_ensured')
+            conn.sudo('make install')
+        conn.sudo('touch /home/' + os_user + '/.ensure_dir/opencv_ensured')
 
 
 def install_caffe2(os_user, caffe2_version, cmake_version):
@@ -418,52 +418,52 @@ def install_caffe2(os_user, caffe2_version, cmake_version):
         env.shell = "/bin/bash -l -c -i"
         manage_pkg('update-minimal --security -y', 'remote', '')
         manage_pkg('-y install --nogpgcheck', 'remote', 'automake cmake3 gcc gcc-c++ kernel-devel leveldb-devel lmdb-devel libtool protobuf-devel graphviz')
-        sudo('pip3.5 install flask graphviz hypothesis jupyter matplotlib==2.0.2 numpy=={} protobuf pydot python-nvd3 pyyaml '
+        conn.sudo('pip3.5 install flask graphviz hypothesis jupyter matplotlib==2.0.2 numpy=={} protobuf pydot python-nvd3 pyyaml '
              'requests scikit-image scipy setuptools tornado future --no-cache-dir'.format(os.environ['notebook_numpy_version']))
-        sudo('cp /opt/cudnn/include/* /opt/cuda-8.0/include/')
-        sudo('cp /opt/cudnn/lib64/* /opt/cuda-8.0/lib64/')
-        sudo('wget https://cmake.org/files/v{2}/cmake-{1}.tar.gz -O /home/{0}/cmake-{1}.tar.gz'.format(
+        conn.sudo('cp /opt/cudnn/include/* /opt/cuda-8.0/include/')
+        conn.sudo('cp /opt/cudnn/lib64/* /opt/cuda-8.0/lib64/')
+        conn.sudo('wget https://cmake.org/files/v{2}/cmake-{1}.tar.gz -O /home/{0}/cmake-{1}.tar.gz'.format(
             os_user, cmake_version, cmake_version.split('.')[0] + "." + cmake_version.split('.')[1]))
-        sudo('tar -zxvf cmake-{}.tar.gz'.format(cmake_version))
+        conn.sudo('tar -zxvf cmake-{}.tar.gz'.format(cmake_version))
         with cd('/home/{}/cmake-{}/'.format(os_user, cmake_version)):
-            sudo('./bootstrap --prefix=/usr/local && make && make install')
-        sudo('ln -s /usr/local/bin/cmake /bin/cmake{}'.format(cmake_version))
-        sudo('git clone https://github.com/pytorch/pytorch.git')
+            conn.sudo('./bootstrap --prefix=/usr/local && make && make install')
+        conn.sudo('ln -s /usr/local/bin/cmake /bin/cmake{}'.format(cmake_version))
+        conn.sudo('git clone https://github.com/pytorch/pytorch.git')
         with cd('/home/{}/pytorch/'.format(os_user)):
-            sudo('git submodule update --init')
+            conn.sudo('git submodule update --init')
             with settings(warn_only=True):
-                sudo('git checkout v{}'.format(caffe2_version))
-                sudo('git submodule update --recursive')
-            sudo('mkdir build && cd build && cmake{} .. && make "-j$(nproc)" install'.format(cmake_version))
-        sudo('touch /home/' + os_user + '/.ensure_dir/caffe2_ensured')
+                conn.sudo('git checkout v{}'.format(caffe2_version))
+                conn.sudo('git submodule update --recursive')
+            conn.sudo('mkdir build && cd build && cmake{} .. && make "-j$(nproc)" install'.format(cmake_version))
+        conn.sudo('touch /home/' + os_user + '/.ensure_dir/caffe2_ensured')
 
 
 def install_cntk(os_user, cntk_version):
     if not exists('/home/{}/.ensure_dir/cntk_ensured'.format(os_user)):
-        sudo('echo "exclude=*.i386 *.i686" >> /etc/yum.conf')
+        conn.sudo('echo "exclude=*.i386 *.i686" >> /etc/yum.conf')
         manage_pkg('clean', 'remote', 'all')
         manage_pkg('update-minimal --security -y', 'remote', '')
         manage_pkg('-y install --nogpgcheck', 'remote', 'openmpi openmpi-devel')
-        sudo('pip3.5 install https://cntk.ai/PythonWheel/GPU/cntk-{}-cp35-cp35m-linux_x86_64.whl --no-cache-dir'.format(cntk_version))
-        sudo('touch /home/{}/.ensure_dir/cntk_ensured'.format(os_user))
+        conn.sudo('pip3.5 install https://cntk.ai/PythonWheel/GPU/cntk-{}-cp35-cp35m-linux_x86_64.whl --no-cache-dir'.format(cntk_version))
+        conn.sudo('touch /home/{}/.ensure_dir/cntk_ensured'.format(os_user))
 
 
 def install_keras(os_user, keras_version):
     if not exists('/home/{}/.ensure_dir/keras_ensured'.format(os_user)):
-        sudo('pip3.5 install keras=={} --no-cache-dir'.format(keras_version))
-        sudo('touch /home/{}/.ensure_dir/keras_ensured'.format(os_user))
+        conn.sudo('pip3.5 install keras=={} --no-cache-dir'.format(keras_version))
+        conn.sudo('touch /home/{}/.ensure_dir/keras_ensured'.format(os_user))
 
 
 def install_theano(os_user, theano_version):
     if not exists('/home/{}/.ensure_dir/theano_ensured'.format(os_user)):
-        sudo('python3.8 -m pip install Theano=={} --no-cache-dir'.format(theano_version))
-        sudo('touch /home/{}/.ensure_dir/theano_ensured'.format(os_user))
+        conn.sudo('python3.8 -m pip install Theano=={} --no-cache-dir'.format(theano_version))
+        conn.sudo('touch /home/{}/.ensure_dir/theano_ensured'.format(os_user))
 
 
 def install_mxnet(os_user, mxnet_version):
     if not exists('/home/{}/.ensure_dir/mxnet_ensured'.format(os_user)):
-        sudo('pip3.5 install mxnet-cu80=={} opencv-python --no-cache-dir'.format(mxnet_version))
-        sudo('touch /home/{}/.ensure_dir/mxnet_ensured'.format(os_user))
+        conn.sudo('pip3.5 install mxnet-cu80=={} opencv-python --no-cache-dir'.format(mxnet_version))
+        conn.sudo('touch /home/{}/.ensure_dir/mxnet_ensured'.format(os_user))
 
 
 #def install_torch(os_user):
@@ -473,12 +473,12 @@ def install_mxnet(os_user, mxnet_version):
 #            manage_pkg('-y install --nogpgcheck', 'remote', 'cmake curl readline-devel ncurses-devel gcc-c++ gcc-gfortran git gnuplot unzip libjpeg-turbo-devel libpng-devel ImageMagick GraphicsMagick-devel fftw-devel sox-devel sox zeromq3-devel qt-devel qtwebkit-devel sox-plugins-freeworld qt-devel')
 #            run('./install.sh -b')
 #        run('source /home/{}/.bashrc'.format(os_user))
-#        sudo('touch /home/{}/.ensure_dir/torch_ensured'.format(os_user))
+#        conn.sudo('touch /home/{}/.ensure_dir/torch_ensured'.format(os_user))
 
 
 def install_gitlab_cert(os_user, certfile):
     try:
-        sudo('mv -f /home/{0}/{1} /etc/pki/ca-trust/source/anchors/{1}'.format(os_user, certfile))
-        sudo('update-ca-trust')
+        conn.sudo('mv -f /home/{0}/{1} /etc/pki/ca-trust/source/anchors/{1}'.format(os_user, certfile))
+        conn.sudo('update-ca-trust')
     except Exception as err:
         print('Failed to install gitlab certificate.{}'.format(str(err)))
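
The pattern running through this change set: Fabric 1's module-level sudo(), run() and put() acted on an implicit global host, while in Fabric 2 they are methods of a fabric.Connection bound to a single host. A minimal sketch of opening such a connection and issuing the converted calls; the host, user and key path below are illustrative, not values from the repository:

    from fabric import Connection

    # Hypothetical connection details; DataLab derives the real ones from its configuration.
    conn = Connection(host='10.10.0.12',
                      user='datalab-user',
                      connect_kwargs={'key_filename': '/root/keys/admin_key.pem'})

    conn.put('/root/templates/tensorboard.service', '/tmp/tensorboard.service')  # upload as the SSH user
    conn.sudo("sed -i 's|OS_USR|datalab-user|' /tmp/tensorboard.service")        # privileged edit
    conn.sudo('cp /tmp/tensorboard.service /etc/systemd/system/')
    conn.run('ls /etc/systemd/system/tensorboard.service')
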
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/ssn_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/ssn_lib.py
index 909e7b8..d3823e9 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/ssn_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/ssn_lib.py
@@ -41,20 +41,20 @@ def ensure_docker_daemon(datalab_path, os_user, region):
             else:
                 mirror = 'mirror.centos.org'
             with cd('/etc/yum.repos.d/'):
-                sudo('echo "[centosrepo]" > centos.repo')
-                sudo('echo "name=Centos 7 Repository" >> centos.repo')
-                sudo('echo "baseurl=http://{}/centos/7/extras/x86_64/" >> centos.repo'.format(mirror))
-                sudo('echo "enabled=1" >> centos.repo')
-                sudo('echo "gpgcheck=1" >> centos.repo')
-                sudo('echo "gpgkey=http://{}/centos/7/os/x86_64/RPM-GPG-KEY-CentOS-7" >> centos.repo'.format(mirror))
-            sudo('yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo')
+                conn.sudo('echo "[centosrepo]" > centos.repo')
+                conn.sudo('echo "name=Centos 7 Repository" >> centos.repo')
+                conn.sudo('echo "baseurl=http://{}/centos/7/extras/x86_64/" >> centos.repo'.format(mirror))
+                conn.sudo('echo "enabled=1" >> centos.repo')
+                conn.sudo('echo "gpgcheck=1" >> centos.repo')
+                conn.sudo('echo "gpgkey=http://{}/centos/7/os/x86_64/RPM-GPG-KEY-CentOS-7" >> centos.repo'.format(mirror))
+            conn.sudo('yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo')
             manage_pkg('update-minimal --security -y', 'remote', '')
             manage_pkg('-y install', 'remote', 'container-selinux')
             manage_pkg('-y install', 'remote', 'docker-ce-{}.ce'.format(docker_version))
-            sudo('usermod -aG docker {}'.format(os_user))
-            sudo('systemctl enable docker.service')
-            sudo('systemctl start docker')
-            sudo('touch {}tmp/docker_daemon_ensured'.format(datalab_path))
+            conn.sudo('usermod -aG docker {}'.format(os_user))
+            conn.sudo('systemctl enable docker.service')
+            conn.sudo('systemctl start docker')
+            conn.sudo('touch {}tmp/docker_daemon_ensured'.format(datalab_path))
         return True
     except:
         return False
@@ -64,9 +64,9 @@ def ensure_nginx(datalab_path):
     try:
         if not exists('{}tmp/nginx_ensured'.format(datalab_path)):
             manage_pkg('-y install', 'remote', 'nginx')
-            sudo('systemctl restart nginx.service')
-            sudo('chkconfig nginx on')
-            sudo('touch {}tmp/nginx_ensured'.format(datalab_path))
+            conn.sudo('systemctl restart nginx.service')
+            conn.sudo('chkconfig nginx on')
+            conn.sudo('touch {}tmp/nginx_ensured'.format(datalab_path))
     except Exception as err:
         traceback.print_exc()
         print('Failed to ensure Nginx: ', str(err))
@@ -76,14 +76,14 @@ def ensure_nginx(datalab_path):
 def ensure_jenkins(datalab_path):
     try:
         if not exists('{}tmp/jenkins_ensured'.format(datalab_path)):
-            sudo('wget -O /etc/yum.repos.d/jenkins.repo https://pkg.jenkins.io/redhat-stable/jenkins.repo')
+            conn.sudo('wget -O /etc/yum.repos.d/jenkins.repo https://pkg.jenkins.io/redhat-stable/jenkins.repo')
             try:
-                sudo('rpm --import https://pkg.jenkins.io/redhat-stable/jenkins.io.key')
+                conn.sudo('rpm --import https://pkg.jenkins.io/redhat-stable/jenkins.io.key')
             except:
                 pass
             manage_pkg('-y install', 'remote', 'jenkins')
             manage_pkg('-y install', 'remote', 'policycoreutils-python')
-            sudo('touch {}tmp/jenkins_ensured'.format(datalab_path))
+            conn.sudo('touch {}tmp/jenkins_ensured'.format(datalab_path))
     except Exception as err:
         traceback.print_exc()
         print('Failed to ensure Jenkins: ', str(err))
@@ -93,26 +93,26 @@ def ensure_jenkins(datalab_path):
 def configure_jenkins(datalab_path, os_user, config, tag_resource_id):
     try:
         if not exists('{}tmp/jenkins_configured'.format(datalab_path)):
-            sudo('rm -rf /var/lib/jenkins/*')
-            sudo('mkdir -p /var/lib/jenkins/jobs/')
-            sudo('chown -R {0}:{0} /var/lib/jenkins/'.format(os_user))
-            put('/root/templates/jenkins_jobs/*', '/var/lib/jenkins/jobs/')
-            # sudo("find /var/lib/jenkins/jobs/ -type f | xargs sed -i \'s/OS_USR/{}/g\'".format(os_user))
-            sudo(
+            conn.sudo('rm -rf /var/lib/jenkins/*')
+            conn.sudo('mkdir -p /var/lib/jenkins/jobs/')
+            conn.sudo('chown -R {0}:{0} /var/lib/jenkins/'.format(os_user))
+            conn.put('/root/templates/jenkins_jobs/*', '/var/lib/jenkins/jobs/')
+            # conn.sudo("find /var/lib/jenkins/jobs/ -type f | xargs sed -i \'s/OS_USR/{}/g\'".format(os_user))
+            conn.sudo(
                 "find /var/lib/jenkins/jobs/ -type f | xargs sed -i \'s/OS_USR/{}/g; s/SBN/{}/g; s/CTUN/{}/g; s/SGI/{}/g; s/VPC/{}/g; s/SNI/{}/g; s/AKEY/{}/g\'".format(
                     os_user, config['service_base_name'], tag_resource_id, config['security_group_id'],
                     config['vpc_id'], config['subnet_id'], config['admin_key']))
-            sudo('chown -R jenkins:jenkins /var/lib/jenkins')
-            sudo('/etc/init.d/jenkins stop; sleep 5')
-            sudo(
+            conn.sudo('chown -R jenkins:jenkins /var/lib/jenkins')
+            conn.sudo('/etc/init.d/jenkins stop; sleep 5')
+            conn.sudo(
                 'sed -i \'/JENKINS_PORT/ s/^/#/\' /etc/sysconfig/jenkins; echo \'JENKINS_PORT="8070"\' >> /etc/sysconfig/jenkins')
-            sudo('sed -i \'/JENKINS_ARGS/ s|=""|="--prefix=/jenkins"|\' /etc/sysconfig/jenkins')
-            sudo('semanage port -a -t http_port_t -p tcp 8070')
-            sudo('setsebool -P httpd_can_network_connect 1')
-            sudo('chkconfig jenkins on')
-            sudo('systemctl start jenkins.service')
-            sudo('echo "jenkins ALL = NOPASSWD:ALL" >> /etc/sudoers')
-            sudo('touch {}tmp/jenkins_configured'.format(datalab_path))
+            conn.sudo('sed -i \'/JENKINS_ARGS/ s|=""|="--prefix=/jenkins"|\' /etc/sysconfig/jenkins')
+            conn.sudo('semanage port -a -t http_port_t -p tcp 8070')
+            conn.sudo('setsebool -P httpd_can_network_connect 1')
+            conn.sudo('chkconfig jenkins on')
+            conn.sudo('systemctl start jenkins.service')
+            conn.sudo('echo "jenkins ALL = NOPASSWD:ALL" >> /etc/sudoers')
+            conn.sudo('touch {}tmp/jenkins_configured'.format(datalab_path))
     except Exception as err:
         traceback.print_exc()
         print('Failed to configure Jenkins: ', str(err))
@@ -123,15 +123,15 @@ def configure_nginx(config, datalab_path, hostname):
     try:
         random_file_part = id_generator(size=20)
         if not exists("/etc/nginx/conf.d/nginx_proxy.conf"):
-            sudo('rm -f /etc/nginx/conf.d/*')
-            put(config['nginx_template_dir'] + 'nginx_proxy.conf', '/tmp/nginx_proxy.conf')
-            put(config['nginx_template_dir'] + 'ssn_nginx.conf', '/tmp/nginx.conf')
-            sudo("sed -i 's|SSN_HOSTNAME|" + hostname + "|' /tmp/nginx_proxy.conf")
-            sudo('cat /tmp/nginx.conf > /etc/nginx/nginx.conf')
-            sudo('mv /tmp/nginx_proxy.conf ' + datalab_path + 'tmp/')
-            sudo('\cp ' + datalab_path + 'tmp/nginx_proxy.conf /etc/nginx/conf.d/')
-            sudo('mkdir -p /etc/nginx/locations')
-            sudo('rm -f /etc/nginx/sites-enabled/default')
+            conn.sudo('rm -f /etc/nginx/conf.d/*')
+            conn.put(config['nginx_template_dir'] + 'nginx_proxy.conf', '/tmp/nginx_proxy.conf')
+            conn.put(config['nginx_template_dir'] + 'ssn_nginx.conf', '/tmp/nginx.conf')
+            conn.sudo("sed -i 's|SSN_HOSTNAME|" + hostname + "|' /tmp/nginx_proxy.conf")
+            conn.sudo('cat /tmp/nginx.conf > /etc/nginx/nginx.conf')
+            conn.sudo('mv /tmp/nginx_proxy.conf ' + datalab_path + 'tmp/')
+            conn.sudo('\cp ' + datalab_path + 'tmp/nginx_proxy.conf /etc/nginx/conf.d/')
+            conn.sudo('mkdir -p /etc/nginx/locations')
+            conn.sudo('rm -f /etc/nginx/sites-enabled/default')
     except Exception as err:
         traceback.print_exc()
         print('Failed to configure Nginx: ', str(err))
@@ -145,17 +145,17 @@ def configure_nginx(config, datalab_path, hostname):
                 with open(template_file) as tpl:
                     for line in tpl:
                         out.write(line)
-            put("/tmp/%s-tmpproxy_location_jenkins_template.conf" % random_file_part,
+            conn.put("/tmp/%s-tmpproxy_location_jenkins_template.conf" % random_file_part,
                 '/tmp/proxy_location_jenkins.conf')
-            sudo('\cp /tmp/proxy_location_jenkins.conf /etc/nginx/locations/')
-            sudo("echo 'engineer:" + crypt.crypt(nginx_password, id_generator()) + "' > /etc/nginx/htpasswd")
+            conn.sudo('\cp /tmp/proxy_location_jenkins.conf /etc/nginx/locations/')
+            conn.sudo("echo 'engineer:" + crypt.crypt(nginx_password, id_generator()) + "' > /etc/nginx/htpasswd")
             with open('jenkins_creds.txt', 'w+') as f:
                 f.write("Jenkins credentials: engineer  / " + nginx_password)
     except:
         return False
 
     try:
-        sudo('service nginx reload')
+        conn.sudo('service nginx reload')
         return True
     except:
         return False
@@ -165,10 +165,10 @@ def ensure_supervisor():
     try:
         if not exists('{}tmp/superv_ensured'.format(os.environ['ssn_datalab_path'])):
             manage_pkg('-y install', 'remote', 'supervisor')
-            # sudo('pip install supervisor')
-            sudo('chkconfig supervisord on')
-            sudo('systemctl start supervisord')
-            sudo('touch {}tmp/superv_ensured'.format(os.environ['ssn_datalab_path']))
+            # conn.sudo('pip install supervisor')
+            conn.sudo('chkconfig supervisord on')
+            conn.sudo('systemctl start supervisord')
+            conn.sudo('touch {}tmp/superv_ensured'.format(os.environ['ssn_datalab_path']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to install supervisor: ', str(err))
@@ -178,19 +178,19 @@ def ensure_supervisor():
 def ensure_mongo():
     try:
         if not exists('{}tmp/mongo_ensured'.format(os.environ['ssn_datalab_path'])):
-            sudo('echo -e "[mongodb-org-3.2]\nname=MongoDB Repository'
+            conn.sudo('echo -e "[mongodb-org-3.2]\nname=MongoDB Repository'
                  '\nbaseurl=https://repo.mongodb.org/yum/redhat/7/mongodb-org/3.2/x86_64/'
                  '\ngpgcheck=1'
                  '\nenabled=1'
                  '\ngpgkey=https://www.mongodb.org/static/pgp/server-3.2.asc" '
                  '> /etc/yum.repos.d/mongodb.repo')
             manage_pkg('-y install', 'remote', 'mongodb-org')
-            sudo('semanage port -a -t mongod_port_t -p tcp 27017')
-            sudo('chkconfig mongod on')
-            sudo('echo "d /var/run/mongodb 0755 mongod mongod" > /lib/tmpfiles.d/mongodb.conf')
-            sudo('sudo systemd-tmpfiles --create mongodb.conf')
-            sudo('systemctl start mongod.service')
-            sudo('touch {}tmp/mongo_ensured'.format(os.environ['ssn_datalab_path']))
+            conn.sudo('semanage port -a -t mongod_port_t -p tcp 27017')
+            conn.sudo('chkconfig mongod on')
+            conn.sudo('echo "d /var/run/mongodb 0755 mongod mongod" > /lib/tmpfiles.d/mongodb.conf')
+            conn.sudo('sudo systemd-tmpfiles --create mongodb.conf')
+            conn.sudo('systemctl start mongod.service')
+            conn.sudo('touch {}tmp/mongo_ensured'.format(os.environ['ssn_datalab_path']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to install MongoDB: ', str(err))
@@ -209,18 +209,18 @@ def start_ss(keyfile, host_string, datalab_conf_dir, web_path,
              keycloak_client_secret, keycloak_auth_server_url, report_path=''):
     try:
         if not exists('{}tmp/ss_started'.format(os.environ['ssn_datalab_path'])):
-            java_path = sudo("alternatives --display java | grep 'slave jre: ' | awk '{print $3}'")
+            java_path = conn.sudo("alternatives --display java | grep 'slave jre: ' | awk '{print $3}'")
             supervisor_conf = '/etc/supervisord.d/supervisor_svc.ini'
             local('sed -i "s|MONGO_PASSWORD|{}|g" /root/templates/ssn.yml'.format(mongo_passwd))
             local('sed -i "s|KEYSTORE_PASSWORD|{}|g" /root/templates/ssn.yml'.format(keystore_passwd))
             local('sed -i "s|CLOUD_PROVIDER|{}|g" /root/templates/ssn.yml'.format(cloud_provider))
             local('sed -i "s|\${JRE_HOME}|' + java_path + '|g" /root/templates/ssn.yml')
-            sudo('sed -i "s|KEYNAME|{}|g" {}/webapp/provisioning-service/conf/provisioning.yml'.
+            conn.sudo('sed -i "s|KEYNAME|{}|g" {}/webapp/provisioning-service/conf/provisioning.yml'.
                  format(os.environ['conf_key_name'], datalab_path))
-            put('/root/templates/ssn.yml', '/tmp/ssn.yml')
-            sudo('mv /tmp/ssn.yml ' + os.environ['ssn_datalab_path'] + 'conf/')
-            put('/root/templates/proxy_location_webapp_template.conf', '/tmp/proxy_location_webapp_template.conf')
-            sudo('mv /tmp/proxy_location_webapp_template.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
+            conn.put('/root/templates/ssn.yml', '/tmp/ssn.yml')
+            conn.sudo('mv /tmp/ssn.yml ' + os.environ['ssn_datalab_path'] + 'conf/')
+            conn.put('/root/templates/proxy_location_webapp_template.conf', '/tmp/proxy_location_webapp_template.conf')
+            conn.sudo('mv /tmp/proxy_location_webapp_template.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
             if cloud_provider == 'gcp':
                 conf_parameter_name = '--spring.config.location='
                 with open('/root/templates/supervisor_svc.conf', 'r') as f:
@@ -237,41 +237,41 @@ def start_ss(keyfile, host_string, datalab_conf_dir, web_path,
                     .replace('CONF_PARAMETER_NAME', conf_parameter_name)
                 with open('/root/templates/supervisor_svc.conf', 'w') as f:
                     f.write(text)
-            put('/root/templates/supervisor_svc.conf', '/tmp/supervisor_svc.conf')
-            sudo('mv /tmp/supervisor_svc.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
-            sudo('cp ' + os.environ['ssn_datalab_path'] +
+            conn.put('/root/templates/supervisor_svc.conf', '/tmp/supervisor_svc.conf')
+            conn.sudo('mv /tmp/supervisor_svc.conf ' + os.environ['ssn_datalab_path'] + 'tmp/')
+            conn.sudo('cp ' + os.environ['ssn_datalab_path'] +
                  'tmp/proxy_location_webapp_template.conf /etc/nginx/locations/proxy_location_webapp.conf')
-            sudo('cp ' + os.environ['ssn_datalab_path'] + 'tmp/supervisor_svc.conf {}'.format(supervisor_conf))
-            sudo('sed -i \'s=WEB_APP_DIR={}=\' {}'.format(web_path, supervisor_conf))
+            conn.sudo('cp ' + os.environ['ssn_datalab_path'] + 'tmp/supervisor_svc.conf {}'.format(supervisor_conf))
+            conn.sudo('sed -i \'s=WEB_APP_DIR={}=\' {}'.format(web_path, supervisor_conf))
             try:
-                sudo('mkdir -p /var/log/application')
-                run('mkdir -p /tmp/yml_tmp/')
+                conn.sudo('mkdir -p /var/log/application')
+                conn.run('mkdir -p /tmp/yml_tmp/')
                 for service in ['self-service', 'provisioning-service', 'billing']:
-                    jar = sudo('cd {0}{1}/lib/; find {1}*.jar -type f'.format(web_path, service))
-                    sudo('ln -s {0}{2}/lib/{1} {0}{2}/{2}.jar '.format(web_path, jar, service))
-                    sudo('cp {0}/webapp/{1}/conf/*.yml /tmp/yml_tmp/'.format(datalab_path, service))
+                    jar = conn.sudo('cd {0}{1}/lib/; find {1}*.jar -type f'.format(web_path, service))
+                    conn.sudo('ln -s {0}{2}/lib/{1} {0}{2}/{2}.jar '.format(web_path, jar, service))
+                    conn.sudo('cp {0}/webapp/{1}/conf/*.yml /tmp/yml_tmp/'.format(datalab_path, service))
                 # Replacing Keycloak and cloud parameters
                 for item in json.loads(cloud_params):
                     if "KEYCLOAK_" in item['key']:
-                        sudo('sed -i "s|{0}|{1}|g" /tmp/yml_tmp/self-service.yml'.format(
+                        conn.sudo('sed -i "s|{0}|{1}|g" /tmp/yml_tmp/self-service.yml'.format(
                             item['key'], item['value']))
-                    sudo('sed -i "s|{0}|{1}|g" /tmp/yml_tmp/provisioning.yml'.format(
+                    conn.sudo('sed -i "s|{0}|{1}|g" /tmp/yml_tmp/provisioning.yml'.format(
                         item['key'], item['value']))
-                sudo('sed -i "s|SERVICE_BASE_NAME|{0}|g" /tmp/yml_tmp/self-service.yml'.format(service_base_name))
-                sudo('sed -i "s|OPERATION_SYSTEM|redhat|g" /tmp/yml_tmp/self-service.yml')
-                sudo('sed -i "s|<SSN_INSTANCE_SIZE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
+                conn.sudo('sed -i "s|SERVICE_BASE_NAME|{0}|g" /tmp/yml_tmp/self-service.yml'.format(service_base_name))
+                conn.sudo('sed -i "s|OPERATION_SYSTEM|redhat|g" /tmp/yml_tmp/self-service.yml')
+                conn.sudo('sed -i "s|<SSN_INSTANCE_SIZE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
                     os.environ['{0}_ssn_instance_size'.format(os.environ['conf_cloud_provider'])]))
                 if os.environ['conf_cloud_provider'] == 'azure':
-                    sudo('sed -i "s|<LOGIN_USE_LDAP>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(ldap_login))
-                    sudo('sed -i "s|<LOGIN_TENANT_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(tenant_id))
-                    sudo('sed -i "s|<LOGIN_APPLICATION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(application_id))
-                    sudo('sed -i "s|<DATALAB_SUBSCRIPTION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
+                    conn.sudo('sed -i "s|<LOGIN_USE_LDAP>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(ldap_login))
+                    conn.sudo('sed -i "s|<LOGIN_TENANT_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(tenant_id))
+                    conn.sudo('sed -i "s|<LOGIN_APPLICATION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(application_id))
+                    conn.sudo('sed -i "s|<DATALAB_SUBSCRIPTION_ID>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
                         subscription_id))
-                    sudo('sed -i "s|<MANAGEMENT_API_AUTH_FILE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
+                    conn.sudo('sed -i "s|<MANAGEMENT_API_AUTH_FILE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(
                         authentication_file))
-                    sudo('sed -i "s|<VALIDATE_PERMISSION_SCOPE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(validate_permission_scope))
-                    sudo('sed -i "s|<LOGIN_APPLICATION_REDIRECT_URL>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(hostname))
-                    sudo('sed -i "s|<LOGIN_PAGE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(hostname))
+                    conn.sudo('sed -i "s|<VALIDATE_PERMISSION_SCOPE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(validate_permission_scope))
+                    conn.sudo('sed -i "s|<LOGIN_APPLICATION_REDIRECT_URL>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(hostname))
+                    conn.sudo('sed -i "s|<LOGIN_PAGE>|{0}|g" /tmp/yml_tmp/self-service.yml'.format(hostname))
                     # if os.environ['azure_datalake_enable'] == 'true':
                     #     permission_scope = 'subscriptions/{}/resourceGroups/{}/providers/Microsoft.DataLakeStore/accounts/{}/providers/Microsoft.Authorization/'.format(
                     #         subscription_id, service_base_name, data_lake_name
@@ -280,8 +280,8 @@ def start_ss(keyfile, host_string, datalab_conf_dir, web_path,
                     #     permission_scope = 'subscriptions/{}/resourceGroups/{}/providers/Microsoft.Authorization/'.format(
                     #         subscription_id, service_base_name
                     #     )
-                sudo('mv /tmp/yml_tmp/* ' + os.environ['ssn_datalab_path'] + 'conf/')
-                sudo('rmdir /tmp/yml_tmp/')
+                conn.sudo('mv /tmp/yml_tmp/* ' + os.environ['ssn_datalab_path'] + 'conf/')
+                conn.sudo('rmdir /tmp/yml_tmp/')
             except Exception as err:
                 traceback.print_exc()
                 append_result("Unable to upload webapp jars. ", str(err))
@@ -344,40 +344,40 @@ def start_ss(keyfile, host_string, datalab_conf_dir, web_path,
                                    keycloak_client_id,
                                    keycloak_client_secret,
                                    keycloak_auth_server_url)
-                sudo('python3 /tmp/configure_billing.py {}'.format(params))
+                conn.sudo('python3 /tmp/configure_billing.py {}'.format(params))
 
             try:
                 if os.environ['conf_stepcerts_enabled'] == 'true':
-                    sudo(
+                    conn.sudo(
                         'openssl pkcs12 -export -in /etc/ssl/certs/datalab.crt -inkey /etc/ssl/certs/datalab.key -name ssn '
                         '-out ssn.p12 -password pass:{0}'.format(keystore_passwd))
-                    sudo('keytool -importkeystore -srckeystore ssn.p12 -srcstoretype PKCS12 -alias ssn -destkeystore '
+                    conn.sudo('keytool -importkeystore -srckeystore ssn.p12 -srcstoretype PKCS12 -alias ssn -destkeystore '
                          '/home/{0}/keys/ssn.keystore.jks -deststorepass "{1}" -srcstorepass "{1}"'.format(
                         os_user, keystore_passwd))
-                    sudo('keytool -keystore /home/{0}/keys/ssn.keystore.jks -alias step-ca -import -file '
+                    conn.sudo('keytool -keystore /home/{0}/keys/ssn.keystore.jks -alias step-ca -import -file '
                          '/etc/ssl/certs/root_ca.crt  -deststorepass "{1}" -srcstorepass "{1}" -noprompt'.format(
                         os_user, keystore_passwd))
-                    sudo('keytool -importcert -trustcacerts -alias step-ca -file /etc/ssl/certs/root_ca.crt '
+                    conn.sudo('keytool -importcert -trustcacerts -alias step-ca -file /etc/ssl/certs/root_ca.crt '
                          '-noprompt -storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_path))
-                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt '
+                    conn.sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt '
                          '-storepass changeit -keystore {0}/lib/security/cacerts'.format(java_path))
                 else:
                     if os.environ['conf_letsencrypt_enabled'] == 'true':
                         print(
                             'Lets Encrypt certificates are not supported for redhat in datalab. Using self signed certificates')
-                    sudo('keytool -genkeypair -alias ssn -keyalg RSA -validity 730 -storepass {1} -keypass {1} \
+                    conn.sudo('keytool -genkeypair -alias ssn -keyalg RSA -validity 730 -storepass {1} -keypass {1} \
                          -keystore /home/{0}/keys/ssn.keystore.jks -keysize 2048 -dname "CN=localhost"'.format(
                         os_user, keystore_passwd))
-                    sudo('keytool -exportcert -alias ssn -storepass {1} -file /etc/ssl/certs/datalab.crt \
+                    conn.sudo('keytool -exportcert -alias ssn -storepass {1} -file /etc/ssl/certs/datalab.crt \
                          -keystore /home/{0}/keys/ssn.keystore.jks'.format(os_user, keystore_passwd))
-                    sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt \
+                    conn.sudo('keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt \
                          -storepass changeit -keystore {1}/lib/security/cacerts'.format(os_user, java_path))
             except:
                 append_result("Unable to generate cert and copy to java keystore")
                 sys.exit(1)
-            sudo('systemctl restart supervisord')
-            sudo('service nginx restart')
-            sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/ss_started')
+            conn.sudo('systemctl restart supervisord')
+            conn.sudo('service nginx restart')
+            conn.sudo('touch ' + os.environ['ssn_datalab_path'] + 'tmp/ss_started')
     except Exception as err:
         traceback.print_exc()
         print('Failed to start Self-service: ', str(err))
@@ -390,15 +390,15 @@ def install_build_dep():
             maven_version = '3.5.4'
             manage_pkg('-y install', 'remote', 'java-1.8.0-openjdk java-1.8.0-openjdk-devel git wget unzip')
             with cd('/opt/'):
-                sudo(
+                conn.sudo(
                     'wget http://mirrors.sonic.net/apache/maven/maven-{0}/{1}/binaries/apache-maven-{1}-bin.zip'.format(
                         maven_version.split('.')[0], maven_version))
-                sudo('unzip apache-maven-{}-bin.zip'.format(maven_version))
-                sudo('mv apache-maven-{} maven'.format(maven_version))
-            sudo('bash -c "curl --silent --location https://rpm.nodesource.com/setup_12.x | bash -"')
+                conn.sudo('unzip apache-maven-{}-bin.zip'.format(maven_version))
+                conn.sudo('mv apache-maven-{} maven'.format(maven_version))
+            conn.sudo('bash -c "curl --silent --location https://rpm.nodesource.com/setup_12.x | bash -"')
             manage_pkg('-y install', 'remote', 'nodejs')
-            sudo('npm config set unsafe-perm=true')
-            sudo('touch {}tmp/build_dep_ensured'.format(os.environ['ssn_datalab_path']))
+            conn.sudo('npm config set unsafe-perm=true')
+            conn.sudo('touch {}tmp/build_dep_ensured'.format(os.environ['ssn_datalab_path']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to install build dependencies for UI: ', str(err))
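
Most of the ensure_* helpers above guard their work with a marker file (under ~/.ensure_dir or <datalab_path>tmp/) and finish with conn.sudo('touch ...'), so a rerun skips steps that already succeeded. A sketch of that idempotence pattern with an explicit existence probe; the exists() helper is an assumption about how such a check can be written in Fabric 2, where warn=True takes the place of Fabric 1's settings(warn_only=True):

    def exists(conn, path):
        # 'test -e' exits non-zero when the path is absent; warn=True keeps that from raising.
        return conn.run('test -e {}'.format(path), warn=True, hide=True).ok

    def ensure_example_tool(conn, os_user):
        marker = '/home/{}/.ensure_dir/example_tool_ensured'.format(os_user)
        if not exists(conn, marker):
            conn.sudo('yum -y install example-tool')   # illustrative package name
            conn.sudo('touch {}'.format(marker))
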
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
index f68d530..a1bfbc6 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
@@ -44,8 +44,8 @@ if __name__ == "__main__":
                                                project_name, endpoint_name)).lower().replace('_', '-')
     gitlab_certfile = os.environ['conf_gitlab_certfile']
     if datalab.actions_lib.get_gitlab_cert(bucket_name, gitlab_certfile):
-        put(gitlab_certfile, gitlab_certfile)
-        sudo('chown root:root {}'.format(gitlab_certfile))
+        conn.put(gitlab_certfile, gitlab_certfile)
+        conn.sudo('chown root:root {}'.format(gitlab_certfile))
         print('{} has been downloaded'.format(gitlab_certfile))
     else:
         print('There is no {} to download'.format(gitlab_certfile))
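
conn.put() transfers files over SFTP as the connecting user, so it cannot write directly into root-owned locations; as in the script above, the usual pattern is to upload somewhere writable and then promote the file with conn.sudo(). A short sketch of that two-step move (paths are illustrative):

    conn.put('gitlab_cert.crt', '/tmp/gitlab_cert.crt')           # lands where the SSH user can write
    conn.sudo('mv /tmp/gitlab_cert.crt /etc/ssl/certs/')          # then relocated with elevated rights
    conn.sudo('chown root:root /etc/ssl/certs/gitlab_cert.crt')
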
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
index 58648ee..33af51d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
@@ -82,7 +82,7 @@ def configure_dataengine_service(instance, emr_conf):
             datalab.fab.configure_data_engine_service_pip(emr_conf['instance_ip'], emr_conf['os_user'],
                                                           emr_conf['key_path'], True)
             datalab.fab.init_datalab_connection(emr_conf['instance_ip'], emr_conf['os_user'], emr_conf['key_path'])
-            sudo('echo "[main]" > /etc/yum/pluginconf.d/priorities.conf ; echo "enabled = 0" >> '
+            conn.sudo('echo "[main]" > /etc/yum/pluginconf.d/priorities.conf ; echo "enabled = 0" >> '
                  '/etc/yum/pluginconf.d/priorities.conf')
             manage_pkg('-y install', 'remote', 'R-devel')
             datalab.fab.close_connection()
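
Calls like the priorities.conf write above rely on shell redirection inside conn.sudo(). Depending on how the sudo prefix and the remote shell are combined, the redirection itself may execute as the unprivileged user, so a defensive variant keeps the whole pipeline inside an explicit bash -c that runs under sudo. A sketch, reusing the file path from the hunk above:

    # Redirection performed by a root shell, not by the calling user's shell:
    conn.sudo('bash -c \'echo "[main]" > /etc/yum/pluginconf.d/priorities.conf; '
              'echo "enabled = 0" >> /etc/yum/pluginconf.d/priorities.conf\'')
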
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
index fa2d60d..8660174 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
@@ -50,26 +50,26 @@ def configure_notebook(args):
     templates_dir = '/root/templates/'
     files_dir = '/root/files/'
     scripts_dir = '/root/scripts/'
-    put(templates_dir + 'sparkmagic_config_template.json', '/tmp/sparkmagic_config_template.json')
-    # put(templates_dir + 'pyspark_dataengine-service_template.json', '/tmp/pyspark_dataengine-service_template.json')
-    # put(templates_dir + 'r_dataengine-service_template.json', '/tmp/r_dataengine-service_template.json')
-    # put(templates_dir + 'toree_dataengine-service_template.json','/tmp/toree_dataengine-service_template.json')
-    put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application),
+    conn.put(templates_dir + 'sparkmagic_config_template.json', '/tmp/sparkmagic_config_template.json')
+    # conn.put(templates_dir + 'pyspark_dataengine-service_template.json', '/tmp/pyspark_dataengine-service_template.json')
+    # conn.put(templates_dir + 'r_dataengine-service_template.json', '/tmp/r_dataengine-service_template.json')
+    # conn.put(templates_dir + 'toree_dataengine-service_template.json','/tmp/toree_dataengine-service_template.json')
+    conn.put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application),
         '/tmp/jupyter_dataengine-service_create_configs.py')
-    # put(files_dir + 'toree_kernel.tar.gz', '/tmp/toree_kernel.tar.gz')
-    # put(templates_dir + 'toree_dataengine-service_templatev2.json', '/tmp/toree_dataengine-service_templatev2.json')
-    # put(templates_dir + 'run_template.sh', '/tmp/run_template.sh')
-    sudo(
+    # conn.put(files_dir + 'toree_kernel.tar.gz', '/tmp/toree_kernel.tar.gz')
+    # conn.put(templates_dir + 'toree_dataengine-service_templatev2.json', '/tmp/toree_dataengine-service_templatev2.json')
+    # conn.put(templates_dir + 'run_template.sh', '/tmp/run_template.sh')
+    conn.sudo(
         '\cp /tmp/jupyter_dataengine-service_create_configs.py /usr/local/bin/jupyter_dataengine-service_create_configs.py')
-    sudo('chmod 755 /usr/local/bin/jupyter_dataengine-service_create_configs.py')
-    sudo('mkdir -p /usr/lib/python3.8/datalab/')
-    run('mkdir -p /tmp/datalab_libs/')
+    conn.sudo('chmod 755 /usr/local/bin/jupyter_dataengine-service_create_configs.py')
+    conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+    conn.run('mkdir -p /tmp/datalab_libs/')
     local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
-    run('chmod a+x /tmp/datalab_libs/*')
-    sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
+    conn.run('chmod a+x /tmp/datalab_libs/*')
+    conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists('/usr/lib64'):
-        sudo('mkdir -p /usr/lib64/python3.8')
-        sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+        conn.sudo('mkdir -p /usr/lib64/python3.8')
+        conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 
 if __name__ == "__main__":
@@ -103,7 +103,7 @@ if __name__ == "__main__":
     cluster_id = get_emr_id_by_name(args.cluster_name)
     master_instances = get_emr_instances_list(cluster_id, 'MASTER')
     master_ip = master_instances[0].get('PrivateIpAddress')
-    sudo("/usr/bin/python3 /usr/local/bin/jupyter_dataengine-service_create_configs.py --bucket " + args.bucket
+    conn.sudo("/usr/bin/python3 /usr/local/bin/jupyter_dataengine-service_create_configs.py --bucket " + args.bucket
          + " --cluster_name " + args.cluster_name + " --emr_version " + args.emr_version + " --spark_version "
          + spark_version + " --scala_version " + scala_version + " --r_version " + r_version + " --hadoop_version "
          + hadoop_version + " --region " + args.region + " --excluded_lines '" + args.emr_excluded_spark_properties
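
configure_notebook() above ships the local /usr/lib/python3.8/datalab modules to the node through an scp call in local() and then relocates them with conn.sudo(). Because Connection.put() copies one file per call, an equivalent expressed purely through the connection is a small loop; a sketch that mirrors the paths used above:

    import glob, os

    conn.run('mkdir -p /tmp/datalab_libs/')
    for path in glob.glob('/usr/lib/python3.8/datalab/*.py'):
        conn.put(path, '/tmp/datalab_libs/{}'.format(os.path.basename(path)))
    conn.run('chmod a+x /tmp/datalab_libs/*')
    conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
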
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py
index 111978a..89277c0 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_install_dataengine-service_kernels.py
@@ -46,19 +46,19 @@ args = parser.parse_args()
 
 def configure_notebook(args):
     scripts_dir = '/root/scripts/'
-    put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application),
+    conn.put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application),
         '/tmp/rstudio_dataengine-service_create_configs.py')
-    sudo(
+    conn.sudo(
         '\cp /tmp/rstudio_dataengine-service_create_configs.py /usr/local/bin/rstudio_dataengine-service_create_configs.py')
-    sudo('chmod 755 /usr/local/bin/rstudio_dataengine-service_create_configs.py')
-    sudo('mkdir -p /usr/lib/python3.8/datalab/')
-    run('mkdir -p /tmp/datalab_libs/')
+    conn.sudo('chmod 755 /usr/local/bin/rstudio_dataengine-service_create_configs.py')
+    conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+    conn.run('mkdir -p /tmp/datalab_libs/')
     local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
-    run('chmod a+x /tmp/datalab_libs/*')
-    sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
+    conn.run('chmod a+x /tmp/datalab_libs/*')
+    conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists('/usr/lib64'):
-        sudo('mkdir -p /usr/lib64/python3.8')
-        sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+        conn.sudo('mkdir -p /usr/lib64/python3.8')
+        conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 
 if __name__ == "__main__":
@@ -69,7 +69,7 @@ if __name__ == "__main__":
     configure_notebook(args)
     spark_version = get_spark_version(args.cluster_name)
     hadoop_version = get_hadoop_version(args.cluster_name)
-    sudo("/usr/bin/python3 /usr/local/bin/rstudio_dataengine-service_create_configs.py --bucket " + args.bucket +
+    conn.sudo("/usr/bin/python3 /usr/local/bin/rstudio_dataengine-service_create_configs.py --bucket " + args.bucket +
          " --cluster_name " + args.cluster_name + " --emr_version " + args.emr_version + " --spark_version " +
          spark_version + " --hadoop_version " + hadoop_version + " --region " + args.region + " --excluded_lines '"
          + args.emr_excluded_spark_properties + "' --project_name " + args.project_name + " --os_user " + args.os_user)
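
Fabric 1's sudo() and run() returned the command output as a string, which is what allows values such as spark_version to be concatenated straight into the next command line above. Fabric 2 returns a Result object instead, so wherever the text itself is needed it comes from .stdout. A sketch of that capture, using an illustrative source for the version and an abridged argument list:

    result = conn.run('cat /tmp/datalab_spark_version', hide=True, warn=True)   # illustrative path
    spark_version = result.stdout.strip() if result.ok else 'unknown'
    conn.sudo('/usr/bin/python3 /usr/local/bin/rstudio_dataengine-service_create_configs.py '
              '--spark_version {}'.format(spark_version))
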
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py
index 9469ac1..dfb0a0b 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py
@@ -49,22 +49,22 @@ def configure_notebook(args):
     templates_dir = '/root/templates/'
     scripts_dir = '/root/scripts/'
     if os.environ['notebook_multiple_clusters'] == 'true':
-        put(templates_dir + 'dataengine-service_interpreter_livy.json', '/tmp/dataengine-service_interpreter.json')
+        conn.put(templates_dir + 'dataengine-service_interpreter_livy.json', '/tmp/dataengine-service_interpreter.json')
     else:
-        put(templates_dir + 'dataengine-service_interpreter_spark.json', '/tmp/dataengine-service_interpreter.json')
-    put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application),
+        conn.put(templates_dir + 'dataengine-service_interpreter_spark.json', '/tmp/dataengine-service_interpreter.json')
+    conn.put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application),
         '/tmp/zeppelin_dataengine-service_create_configs.py')
-    sudo(
+    conn.sudo(
         '\cp /tmp/zeppelin_dataengine-service_create_configs.py /usr/local/bin/zeppelin_dataengine-service_create_configs.py')
-    sudo('chmod 755 /usr/local/bin/zeppelin_dataengine-service_create_configs.py')
-    sudo('mkdir -p /usr/lib/python3.8/datalab/')
-    run('mkdir -p /tmp/datalab_libs/')
+    conn.sudo('chmod 755 /usr/local/bin/zeppelin_dataengine-service_create_configs.py')
+    conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+    conn.run('mkdir -p /tmp/datalab_libs/')
     local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
-    run('chmod a+x /tmp/datalab_libs/*')
-    sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
+    conn.run('chmod a+x /tmp/datalab_libs/*')
+    conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists('/usr/lib64'):
-        sudo('mkdir -p /usr/lib64/python3.8')
-        sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+        conn.sudo('mkdir -p /usr/lib64/python3.8')
+        conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 
 if __name__ == "__main__":
@@ -115,4 +115,4 @@ if __name__ == "__main__":
                 numpy_version,
                 args.application,
                 r_enabled)
-    sudo(command)
\ No newline at end of file
+    conn.sudo(command)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
index 7a2e16d..9ed2297 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
@@ -46,8 +46,8 @@ if __name__ == "__main__":
         if ssn_storage_account_tag == storage_account.tags["Name"]:
             ssn_storage_account_name = storage_account.name
     if AzureActions().download_from_container(resource_group_name, ssn_storage_account_name, container_name, gitlab_certfile):
-        put(gitlab_certfile, gitlab_certfile)
-        sudo('chown root:root {}'.format(gitlab_certfile))
+        conn.put(gitlab_certfile, gitlab_certfile)
+        conn.sudo('chown root:root {}'.format(gitlab_certfile))
         print('{} has been downloaded'.format(gitlab_certfile))
     else:
         print('There is no {} to download'.format(gitlab_certfile))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
index 3f153fb..b740400 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
@@ -90,10 +90,10 @@ if __name__ == "__main__":
             env.host_string = env.user + "@" + env.hosts
             params = '--refresh_token {}'.format(os.environ['azure_user_refresh_token'])
             try:
-                put('~/scripts/common_notebook_update_refresh_token.py', '/tmp/common_notebook_update_refresh_token.py')
-                sudo('mv /tmp/common_notebook_update_refresh_token.py '
+                conn.put('~/scripts/common_notebook_update_refresh_token.py', '/tmp/common_notebook_update_refresh_token.py')
+                conn.sudo('mv /tmp/common_notebook_update_refresh_token.py '
                      '/usr/local/bin/common_notebook_update_refresh_token.py')
-                sudo("/usr/bin/python3 /usr/local/bin/{}.py {}".format('common_notebook_update_refresh_token', params))
+                conn.sudo("/usr/bin/python3 /usr/local/bin/{}.py {}".format('common_notebook_update_refresh_token', params))
             except:
                 traceback.print_exc()
                 raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py b/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
index e217ab3..ebd32e5 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
@@ -39,7 +39,7 @@ if __name__ == "__main__":
 
     print("Setting password for Rstudio user.")
     try:
-        sudo('echo "{0}:{1}" | chpasswd'.format(args.os_user, args.rstudio_pass))
+        conn.sudo('echo "{0}:{1}" | chpasswd'.format(args.os_user, args.rstudio_pass))
         datalab.fab.close_connection()
     except Exception as err:
         print('Error: {0}'.format(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
index c8714cc..1fe2874 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
@@ -39,8 +39,8 @@ if __name__ == "__main__":
                                                os.environ['endpoint_name'])).lower().replace('_', '-')
     gitlab_certfile = os.environ['conf_gitlab_certfile']
     if GCPActions().get_gitlab_cert(bucket_name, gitlab_certfile):
-        put(gitlab_certfile, gitlab_certfile)
-        sudo('chown root:root {}'.format(gitlab_certfile))
+        conn.put(gitlab_certfile, gitlab_certfile)
+        conn.sudo('chown root:root {}'.format(gitlab_certfile))
         print('{} has been downloaded'.format(gitlab_certfile))
     else:
         print('There is no {} to download'.format(gitlab_certfile))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_install_dataengine-service_kernels.py
index e6353e6..fcff32c 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_install_dataengine-service_kernels.py
@@ -50,24 +50,24 @@ def configure_notebook(args):
     templates_dir = '/root/templates/'
     files_dir = '/root/files/'
     scripts_dir = '/root/scripts/'
-    put(templates_dir + 'sparkmagic_config_template.json', '/tmp/sparkmagic_config_template.json')
-    # put(templates_dir + 'pyspark_dataengine-service_template.json', '/tmp/pyspark_dataengine-service_template.json')
-    # put(templates_dir + 'r_dataengine-service_template.json', '/tmp/r_dataengine-service_template.json')
-    # put(templates_dir + 'toree_dataengine-service_template.json','/tmp/toree_dataengine-service_template.json')
-    put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application), '/tmp/create_configs.py')
-    # put(files_dir + 'toree_kernel.tar.gz', '/tmp/toree_kernel.tar.gz')
-    # put(templates_dir + 'toree_dataengine-service_templatev2.json', '/tmp/toree_dataengine-service_templatev2.json')
-    # put(templates_dir + 'run_template.sh', '/tmp/run_template.sh')
-    sudo('\cp /tmp/create_configs.py /usr/local/bin/create_configs.py')
-    sudo('chmod 755 /usr/local/bin/create_configs.py')
-    sudo('mkdir -p /usr/lib/python3.8/datalab/')
-    run('mkdir -p /tmp/datalab_libs/')
+    conn.put(templates_dir + 'sparkmagic_config_template.json', '/tmp/sparkmagic_config_template.json')
+    # conn.put(templates_dir + 'pyspark_dataengine-service_template.json', '/tmp/pyspark_dataengine-service_template.json')
+    # conn.put(templates_dir + 'r_dataengine-service_template.json', '/tmp/r_dataengine-service_template.json')
+    # conn.put(templates_dir + 'toree_dataengine-service_template.json','/tmp/toree_dataengine-service_template.json')
+    conn.put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application), '/tmp/create_configs.py')
+    # conn.put(files_dir + 'toree_kernel.tar.gz', '/tmp/toree_kernel.tar.gz')
+    # conn.put(templates_dir + 'toree_dataengine-service_templatev2.json', '/tmp/toree_dataengine-service_templatev2.json')
+    # conn.put(templates_dir + 'run_template.sh', '/tmp/run_template.sh')
+    conn.sudo('\cp /tmp/create_configs.py /usr/local/bin/create_configs.py')
+    conn.sudo('chmod 755 /usr/local/bin/create_configs.py')
+    conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+    conn.run('mkdir -p /tmp/datalab_libs/')
     local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
-    run('chmod a+x /tmp/datalab_libs/*')
-    sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
+    conn.run('chmod a+x /tmp/datalab_libs/*')
+    conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists('/usr/lib64'):
-        sudo('mkdir -p /usr/lib64/python3.8')
-        sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+        conn.sudo('mkdir -p /usr/lib64/python3.8')
+        conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 
 if __name__ == "__main__":
@@ -91,9 +91,9 @@ if __name__ == "__main__":
     r_enabled = os.environ['notebook_r_enabled']
     master_host = '{}-m'.format(args.cluster_name)
     master_ip = get_instance_private_ip_address(os.environ['gcp_zone'], master_host)
-    sudo('echo "[global]" > /etc/pip.conf; echo "proxy = $(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=)" >> /etc/pip.conf')
-    sudo('echo "use_proxy=yes" > ~/.wgetrc; proxy=$(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=); echo "http_proxy=$proxy" >> ~/.wgetrc; echo "https_proxy=$proxy" >> ~/.wgetrc')
-    sudo('unset http_proxy https_proxy; export gcp_project_id="{0}"; export conf_resource="{1}"; '
+    conn.sudo('echo "[global]" > /etc/pip.conf; echo "proxy = $(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=)" >> /etc/pip.conf')
+    conn.sudo('echo "use_proxy=yes" > ~/.wgetrc; proxy=$(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=); echo "http_proxy=$proxy" >> ~/.wgetrc; echo "https_proxy=$proxy" >> ~/.wgetrc')
+    conn.sudo('unset http_proxy https_proxy; export gcp_project_id="{0}"; export conf_resource="{1}"; '
          '/usr/bin/python3 /usr/local/bin/create_configs.py --bucket {2} --cluster_name {3} --dataproc_version {4}'
          ' --spark_version {5} --hadoop_version {6} --region {7} --user_name {8} --os_user {9} --pip_mirror {10} '
          '--application {11} --r_version {12} --r_enabled {13} --python_version {14}  --master_ip {15} --scala_version {16}'
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py
index d296646..682543b 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py
@@ -48,17 +48,17 @@ args = parser.parse_args()
 
 def configure_notebook(args):
     scripts_dir = '/root/scripts/'
-    put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application), '/tmp/create_configs.py')
-    sudo('\cp /tmp/create_configs.py /usr/local/bin/create_configs.py')
-    sudo('chmod 755 /usr/local/bin/create_configs.py')
-    sudo('mkdir -p /usr/lib/python3.8/datalab/')
-    run('mkdir -p /tmp/datalab_libs/')
+    conn.put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application), '/tmp/create_configs.py')
+    conn.sudo('\cp /tmp/create_configs.py /usr/local/bin/create_configs.py')
+    conn.sudo('chmod 755 /usr/local/bin/create_configs.py')
+    conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+    conn.run('mkdir -p /tmp/datalab_libs/')
     local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
-    run('chmod a+x /tmp/datalab_libs/*')
-    sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
+    conn.run('chmod a+x /tmp/datalab_libs/*')
+    conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists('/usr/lib64'):
-        sudo('mkdir -p /usr/lib64/python3.8')
-        sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+        conn.sudo('mkdir -p /usr/lib64/python3.8')
+        conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 
 if __name__ == "__main__":
@@ -69,8 +69,8 @@ if __name__ == "__main__":
     configure_notebook(args)
     spark_version = datalab.actions_lib.GCPActions().get_cluster_app_version(args.bucket, args.project_name, args.cluster_name, 'spark')
     hadoop_version = datalab.actions_lib.GCPActions().get_cluster_app_version(args.bucket, args.project_name, args.cluster_name, 'hadoop')
-    sudo('echo "[global]" > /etc/pip.conf; echo "proxy = $(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=)" >> /etc/pip.conf')
-    sudo('echo "use_proxy=yes" > ~/.wgetrc; proxy=$(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=); echo "http_proxy=$proxy" >> ~/.wgetrc; echo "https_proxy=$proxy" >> ~/.wgetrc')
-    sudo('unset http_proxy https_proxy; export gcp_project_id="{0}"; export conf_resource="{1}"; /usr/bin/python3 /usr/local/bin/create_configs.py --bucket {2} --cluster_name {3} --dataproc_version {4} --spark_version {5} --hadoop_version {6} --region {7} --user_name {8} --os_user {9} --pip_mirror {10} --application {11}'
+    conn.sudo('echo "[global]" > /etc/pip.conf; echo "proxy = $(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=)" >> /etc/pip.conf')
+    conn.sudo('echo "use_proxy=yes" > ~/.wgetrc; proxy=$(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=); echo "http_proxy=$proxy" >> ~/.wgetrc; echo "https_proxy=$proxy" >> ~/.wgetrc')
+    conn.sudo('unset http_proxy https_proxy; export gcp_project_id="{0}"; export conf_resource="{1}"; /usr/bin/python3 /usr/local/bin/create_configs.py --bucket {2} --cluster_name {3} --dataproc_version {4} --spark_version {5} --hadoop_version {6} --region {7} --user_name {8} --os_user {9} --pip_mirror {10} --application {11}'
          .format(os.environ['gcp_project_id'], os.environ['conf_resource'], args.bucket, args.cluster_name, args.dataproc_version, spark_version, hadoop_version,
                  args.region, args.project_name, args.os_user, args.pip_mirror, args.application))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_install_dataengine-service_kernels.py
index 7293330..b153572 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_install_dataengine-service_kernels.py
@@ -50,20 +50,20 @@ def configure_notebook(args):
     templates_dir = '/root/templates/'
     scripts_dir = '/root/scripts/'
     if os.environ['notebook_multiple_clusters'] == 'true':
-        put(templates_dir + 'dataengine-service_interpreter_livy.json', '/tmp/dataengine-service_interpreter.json')
+        conn.put(templates_dir + 'dataengine-service_interpreter_livy.json', '/tmp/dataengine-service_interpreter.json')
     else:
-        put(templates_dir + 'dataengine-service_interpreter_spark.json', '/tmp/dataengine-service_interpreter.json')
-    put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application), '/tmp/create_configs.py')
-    sudo('\cp /tmp/create_configs.py /usr/local/bin/create_configs.py')
-    sudo('chmod 755 /usr/local/bin/create_configs.py')
-    sudo('mkdir -p /usr/lib/python3.8/datalab/')
+        conn.put(templates_dir + 'dataengine-service_interpreter_spark.json', '/tmp/dataengine-service_interpreter.json')
+    conn.put(scripts_dir + '{}_dataengine-service_create_configs.py'.format(args.application), '/tmp/create_configs.py')
+    conn.sudo('\cp /tmp/create_configs.py /usr/local/bin/create_configs.py')
+    conn.sudo('chmod 755 /usr/local/bin/create_configs.py')
+    conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
-    run('mkdir -p /tmp/datalab_libs/')
+    conn.run('mkdir -p /tmp/datalab_libs/')
     local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string))
-    run('chmod a+x /tmp/datalab_libs/*')
+    conn.run('chmod a+x /tmp/datalab_libs/*')
-    sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
+    conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists('/usr/lib64'):
-        sudo('mkdir -p /usr/lib64/python3.8')
-        sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+        conn.sudo('mkdir -p /usr/lib64/python3.8')
+        conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 
 if __name__ == "__main__":
@@ -75,8 +75,8 @@ if __name__ == "__main__":
     r_enabled = os.environ['notebook_r_enabled']
     spark_version = datalab.actions_lib.GCPActions().get_cluster_app_version(args.bucket, args.project_name, args.cluster_name, 'spark')
     hadoop_version = datalab.actions_lib.GCPActions().get_cluster_app_version(args.bucket, args.project_name, args.cluster_name, 'hadoop')
-    sudo('echo "[global]" > /etc/pip.conf; echo "proxy = $(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=)" >> /etc/pip.conf')
-    sudo('echo "use_proxy=yes" > ~/.wgetrc; proxy=$(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=); echo "http_proxy=$proxy" >> ~/.wgetrc; echo "https_proxy=$proxy" >> ~/.wgetrc')
-    sudo('unset http_proxy https_proxy; export gcp_project_id="{0}"; export conf_resource="{1}"; /usr/bin/python3 /usr/local/bin/create_configs.py --bucket {2} --cluster_name {3} --dataproc_version {4} --spark_version {5} --hadoop_version {6} --region {7} --user_name {8} --os_user {9} --pip_mirror {10} --application {11} --livy_version {12} --multiple_clusters {13} --r_enabled {14}'
+    conn.sudo('echo "[global]" > /etc/pip.conf; echo "proxy = $(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=)" >> /etc/pip.conf')
+    conn.sudo('echo "use_proxy=yes" > ~/.wgetrc; proxy=$(cat /etc/profile | grep proxy | head -n1 | cut -f2 -d=); echo "http_proxy=$proxy" >> ~/.wgetrc; echo "https_proxy=$proxy" >> ~/.wgetrc')
+    conn.sudo('unset http_proxy https_proxy; export gcp_project_id="{0}"; export conf_resource="{1}"; /usr/bin/python3 /usr/local/bin/create_configs.py --bucket {2} --cluster_name {3} --dataproc_version {4} --spark_version {5} --hadoop_version {6} --region {7} --user_name {8} --os_user {9} --pip_mirror {10} --application {11} --livy_version {12} --multiple_clusters {13} --r_enabled {14}'
          .format(os.environ['gcp_project_id'], os.environ['conf_resource'], args.bucket, args.cluster_name, args.dataproc_version, spark_version, hadoop_version,
                  args.region, args.project_name, args.os_user, args.pip_mirror, args.application, os.environ['notebook_livy_version'], os.environ['notebook_multiple_clusters'], r_enabled))
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py b/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py
index 8eeb200..ff5f599 100644
--- a/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py
+++ b/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py
@@ -48,7 +48,7 @@ if __name__ == "__main__":
         inactivity_file = 'local_inactivity'
 
     if exists('{}{}'.format(inactivity_dir, inactivity_file)):
-        timestamp = sudo('cat {}{}'.format(inactivity_dir, inactivity_file))
+        timestamp = conn.sudo('cat {}{}'.format(inactivity_dir, inactivity_file))
     else:
         timestamp = '0000000000'
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
index bc751fa..f49d806 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
@@ -37,16 +37,16 @@ args = parser.parse_args()
 
 def general_clean():
     try:
-        sudo('systemctl stop ungit')
-        sudo('systemctl stop inactive.timer')
-        sudo('rm -f /etc/systemd/system/inactive.service')
-        sudo('rm -f /etc/systemd/system/inactive.timer')
-        sudo('rm -rf /opt/inactivity')
-        sudo('npm -g uninstall ungit')
-        sudo('rm -f /etc/systemd/system/ungit.service')
-        sudo('systemctl daemon-reload')
+        conn.sudo('systemctl stop ungit')
+        conn.sudo('systemctl stop inactive.timer')
+        conn.sudo('rm -f /etc/systemd/system/inactive.service')
+        conn.sudo('rm -f /etc/systemd/system/inactive.timer')
+        conn.sudo('rm -rf /opt/inactivity')
+        conn.sudo('npm -g uninstall ungit')
+        conn.sudo('rm -f /etc/systemd/system/ungit.service')
+        conn.sudo('systemctl daemon-reload')
         remove_os_pkg(['nodejs', 'npm'])
-        sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
+        conn.sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
@@ -54,15 +54,15 @@ def general_clean():
 
 def clean_jupyter():
     try:
-        sudo('systemctl stop jupyter-notebook')
-        sudo('pip3 uninstall -y notebook jupyter')
-        sudo('rm -rf /usr/local/share/jupyter/')
-        sudo('rm -rf /home/{}/.jupyter/'.format(args.os_user))
-        sudo('rm -rf /home/{}/.ipython/'.format(args.os_user))
-        sudo('rm -rf /home/{}/.ipynb_checkpoints/'.format(args.os_user))
-        sudo('rm -rf /home/{}/.local/share/jupyter/'.format(args.os_user))
-        sudo('rm -f /etc/systemd/system/jupyter-notebook.service')
-        sudo('systemctl daemon-reload')
+        conn.sudo('systemctl stop jupyter-notebook')
+        conn.sudo('pip3 uninstall -y notebook jupyter')
+        conn.sudo('rm -rf /usr/local/share/jupyter/')
+        conn.sudo('rm -rf /home/{}/.jupyter/'.format(args.os_user))
+        conn.sudo('rm -rf /home/{}/.ipython/'.format(args.os_user))
+        conn.sudo('rm -rf /home/{}/.ipynb_checkpoints/'.format(args.os_user))
+        conn.sudo('rm -rf /home/{}/.local/share/jupyter/'.format(args.os_user))
+        conn.sudo('rm -f /etc/systemd/system/jupyter-notebook.service')
+        conn.sudo('systemctl daemon-reload')
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
@@ -70,14 +70,14 @@ def clean_jupyter():
 
 def clean_zeppelin():
     try:
-        sudo('systemctl stop zeppelin-notebook')
-        sudo('rm -rf /opt/zeppelin* /var/log/zeppelin /var/run/zeppelin')
+        conn.sudo('systemctl stop zeppelin-notebook')
+        conn.sudo('rm -rf /opt/zeppelin* /var/log/zeppelin /var/run/zeppelin')
         if os.environ['notebook_multiple_clusters'] == 'true':
-            sudo('systemctl stop livy-server')
-            sudo('rm -rf /opt/livy* /var/run/livy')
-            sudo('rm -f /etc/systemd/system/livy-server.service')
-        sudo('rm -f /etc/systemd/system/zeppelin-notebook.service')
-        sudo('systemctl daemon-reload')
+            conn.sudo('systemctl stop livy-server')
+            conn.sudo('rm -rf /opt/livy* /var/run/livy')
+            conn.sudo('rm -f /etc/systemd/system/livy-server.service')
+        conn.sudo('rm -f /etc/systemd/system/zeppelin-notebook.service')
+        conn.sudo('systemctl daemon-reload')
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
@@ -86,8 +86,8 @@ def clean_zeppelin():
 def clean_rstudio():
     try:
         remove_os_pkg(['rstudio-server'])
-        sudo('rm -f /home/{}/.Rprofile'.format(args.os_user))
-        sudo('rm -f /home/{}/.Renviron'.format(args.os_user))
+        conn.sudo('rm -f /home/{}/.Rprofile'.format(args.os_user))
+        conn.sudo('rm -f /home/{}/.Renviron'.format(args.os_user))
     except Exception as err:
         print('Error:', str(err))
         sys.exit(1)
@@ -96,9 +96,9 @@ def clean_rstudio():
 def clean_tensor():
     try:
         clean_jupyter()
-        sudo('systemctl stop tensorboard')
-        sudo('systemctl disable tensorboard')
-        sudo('systemctl daemon-reload')
+        conn.sudo('systemctl stop tensorboard')
+        conn.sudo('systemctl disable tensorboard')
+        conn.sudo('systemctl daemon-reload')
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
@@ -107,9 +107,9 @@ def clean_tensor():
 def clean_tensor_rstudio():
     try:
         clean_rstudio()
-        sudo('systemctl stop tensorboard')
-        sudo('systemctl disable tensorboard')
-        sudo('systemctl daemon-reload')
+        conn.sudo('systemctl stop tensorboard')
+        conn.sudo('systemctl disable tensorboard')
+        conn.sudo('systemctl daemon-reload')
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
index b6b5710..e809511 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
@@ -104,8 +104,8 @@ if __name__ == "__main__":
 
     print("Configure connections")
     datalab.fab.init_datalab_connection(args.edge_hostname, args.os_user, args.keyfile)
-    put('/tmp/{}.conf'.format(conf_file_name), '/usr/local/openresty/nginx/conf/locations', use_sudo=True)
-    sudo('service openresty reload')
+    conn.put('/tmp/{}.conf'.format(conf_file_name), '/usr/local/openresty/nginx/conf/locations', use_sudo=True)
+    conn.sudo('service openresty reload')
 
     datalab.fab.close_connection()
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
index 4574e63..d1522ea 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
@@ -42,10 +42,10 @@ args = parser.parse_args()
 def update_spark_defaults_conf(spark_conf):
     try:
         timestamp = time.strftime("%a, %d %b %Y %H:%M:%S %Z", time.gmtime())
-        configs = sudo('find /opt/ /etc/ /usr/lib/ -name spark-defaults.conf -type f').split('\r\n')
+        configs = conn.sudo('find /opt/ /etc/ /usr/lib/ -name spark-defaults.conf -type f').split('\r\n')
         for conf in filter(None, configs):
-            sudo('''sed -i '/^# Updated/d' {0}'''.format(conf))
-            sudo('''echo "# Updated by DATALAB at {0} >> {1}'''.format(timestamp, conf))
+            conn.sudo('''sed -i '/^# Updated/d' {0}'''.format(conf))
+            conn.sudo('''echo "# Updated by DATALAB at {0}" >> {1}'''.format(timestamp, conf))
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
@@ -54,10 +54,10 @@ def update_spark_defaults_conf(spark_conf):
 def add_custom_spark_properties(cluster_name):
     try:
         if os.path.exists('/opt/{0}'.format(cluster_name)):
-            datalab_header = sudo('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name))
+            datalab_header = conn.sudo('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name))
             spark_configurations = ast.literal_eval(os.environ['spark_configurations'])
             new_spark_defaults = list()
-            spark_defaults = sudo('cat /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
+            spark_defaults = conn.sudo('cat /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
             current_spark_properties = spark_defaults.split('\n')
             for param in current_spark_properties:
                 if param.split(' ')[0] != '#':
@@ -70,11 +70,11 @@ def add_custom_spark_properties(cluster_name):
                                     new_spark_defaults.append(property + ' ' + config['Properties'][property])
                     new_spark_defaults.append(param)
             new_spark_defaults = set(new_spark_defaults)
-            sudo("echo '{0}' > /opt/{1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_name))
+            conn.sudo("echo '{0}' > /opt/{1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_name))
             for prop in new_spark_defaults:
                 prop = prop.rstrip()
-                sudo('echo "{0}" >> /opt/{1}/spark/conf/spark-defaults.conf'.format(prop, cluster_name))
-            sudo('sed -i "/^\s*$/d" /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
+                conn.sudo('echo "{0}" >> /opt/{1}/spark/conf/spark-defaults.conf'.format(prop, cluster_name))
+            conn.sudo('sed -i "/^\s*$/d" /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
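
A behavioural difference worth flagging for hunks like update_spark_defaults_conf() above: Fabric 1's run()/sudo() returned string-like values, whereas Fabric 2 returns an invoke Result, so chained string operations such as .split('\r\n') need to read result.stdout first. A hedged sketch of the adjusted pattern, assuming conn is the Fabric 2 connection used by these scripts:

    # Sketch: read .stdout from the Result before applying string methods.
    result = conn.sudo('find /opt/ /etc/ /usr/lib/ -name spark-defaults.conf -type f')
    configs = result.stdout.split('\n')
    for conf in filter(None, configs):
        conn.sudo("sed -i '/^# Updated/d' {0}".format(conf.strip()))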
diff --git a/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py b/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
index 3f67db0..32bea59 100644
--- a/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
+++ b/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
@@ -39,19 +39,19 @@ if __name__ == "__main__":
     datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
     print("Configuring proxy for docker")
     try:
-        sudo('mkdir -p /etc/systemd/system/docker.service.d')
-        sudo('touch {}'.format(http_file))
-        sudo('echo -e \'[Service] \nEnvironment=\"HTTP_PROXY=\'$http_proxy\'\"\' > {}'.format(http_file))
-        sudo('touch {}'.format(https_file))
-        sudo('echo -e \'[Service] \nEnvironment=\"HTTPS_PROXY=\'$http_proxy\'\"\' > {}'.format(https_file))
-        sudo('mkdir /home/{}/.docker'.format(args.os_user))
-        sudo('touch /home/{}/.docker/config.json'.format(args.os_user))
-        sudo(
+        conn.sudo('mkdir -p /etc/systemd/system/docker.service.d')
+        conn.sudo('touch {}'.format(http_file))
+        conn.sudo('echo -e \'[Service] \nEnvironment=\"HTTP_PROXY=\'$http_proxy\'\"\' > {}'.format(http_file))
+        conn.sudo('touch {}'.format(https_file))
+        conn.sudo('echo -e \'[Service] \nEnvironment=\"HTTPS_PROXY=\'$http_proxy\'\"\' > {}'.format(https_file))
+        conn.sudo('mkdir /home/{}/.docker'.format(args.os_user))
+        conn.sudo('touch /home/{}/.docker/config.json'.format(args.os_user))
+        conn.sudo(
             'echo -e \'{\n "proxies":\n {\n   "default":\n   {\n     "httpProxy":"\'$http_proxy\'",\n     "httpsProxy":"\'$http_proxy\'"\n   }\n }\n}\' > /home/datalab-user/.docker/config.json')
-        sudo('usermod -a -G docker ' + args.os_user)
-        sudo('update-rc.d docker defaults')
-        sudo('update-rc.d docker enable')
-        sudo('systemctl restart docker')
+        conn.sudo('usermod -a -G docker ' + args.os_user)
+        conn.sudo('update-rc.d docker defaults')
+        conn.sudo('update-rc.d docker enable')
+        conn.sudo('systemctl restart docker')
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
index 9279198..ea067e3 100644
--- a/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
@@ -44,31 +44,31 @@ args = parser.parse_args()
 def configure_notebook(keyfile, hoststring):
     templates_dir = '/root/templates/'
     scripts_dir = '/root/scripts/'
-    run('mkdir -p /tmp/{}/'.format(args.cluster_name))
-    put(templates_dir + 'sparkmagic_config_template.json', '/tmp/sparkmagic_config_template.json')
+    conn.run('mkdir -p /tmp/{}/'.format(args.cluster_name))
+    conn.put(templates_dir + 'sparkmagic_config_template.json', '/tmp/sparkmagic_config_template.json')
     if not exists('/tmp/deeplearning_dataengine_create_configs.py'):
-        put(scripts_dir + 'deeplearning_dataengine_create_configs.py',
+        conn.put(scripts_dir + 'deeplearning_dataengine_create_configs.py',
             '/tmp/deeplearning_dataengine_create_configs.py')
-    # put(templates_dir + 'pyspark_dataengine_template.json', '/tmp/{}/pyspark_dataengine_template.json'.format(args.cluster_name))
-    # put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/{}/notebook_spark-defaults_local.conf'.format(args.cluster_name))
+    # conn.put(templates_dir + 'pyspark_dataengine_template.json', '/tmp/{}/pyspark_dataengine_template.json'.format(args.cluster_name))
+    # conn.put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/{}/notebook_spark-defaults_local.conf'.format(args.cluster_name))
     spark_master_ip = args.spark_master.split('//')[1].split(':')[0]
     # spark_memory = get_spark_memory(True, args.os_user, spark_master_ip, keyfile)
-    # run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory, args.cluster_name))
+    # conn.run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory, args.cluster_name))
     if not exists('/usr/local/bin/deeplearning_dataengine_create_configs.py'):
-        put(scripts_dir + 'deeplearning_dataengine_create_configs.py',
+        conn.put(scripts_dir + 'deeplearning_dataengine_create_configs.py',
             '/usr/local/bin/deeplearning_dataengine_create_configs.py', use_sudo=True)
-        sudo('chmod 755 /usr/local/bin/deeplearning_dataengine_create_configs.py')
+        conn.sudo('chmod 755 /usr/local/bin/deeplearning_dataengine_create_configs.py')
     if not exists('/usr/lib/python3.8/datalab/'):
-        sudo('mkdir -p /usr/lib/python3.8/datalab/')
-        put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
-        sudo('chmod a+x /usr/lib/python3.8/datalab/*')
+        conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+        conn.put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
+        conn.sudo('chmod a+x /usr/lib/python3.8/datalab/*')
         if exists('/usr/lib64'):
-            sudo('mkdir -p /usr/lib64/python3.8')
-            sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+            conn.sudo('mkdir -p /usr/lib64/python3.8')
+            conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 def create_inactivity_log(master_ip, hoststring):
     reworked_ip = master_ip.replace('.', '-')
-    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
+    conn.sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -83,7 +83,7 @@ if __name__ == "__main__":
     create_inactivity_log(args.spark_master_ip, env.host_string)
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
-    sudo('/usr/bin/python3 /usr/local/bin/deeplearning_dataengine_create_configs.py '
+    conn.sudo('/usr/bin/python3 /usr/local/bin/deeplearning_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
          '--datalake_enabled {} --spark_configurations "{}"'.format(args.cluster_name, args.spark_version,
                                                                   args.hadoop_version, args.os_user,  args.spark_master,
diff --git a/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py b/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
index 14887b4..f801d22 100644
--- a/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
@@ -108,7 +108,7 @@ if __name__ == "__main__":
                 elif os.environ['conf_cloud_provider'] in ('gcp'):
                     manage_pkg('-y build-dep', 'remote', 'libcurl4-gnutls-dev libxml2-dev')
                     manage_pkg('-y install', 'remote', 'libcurl4-gnutls-dev libgit2-dev libxml2-dev')
-                sudo('R -e "install.packages(\'devtools\', repos = \'https://cloud.r-project.org\')"')
+                conn.sudo('R -e "install.packages(\'devtools\', repos = \'https://cloud.r-project.org\')"')
             status = install_r_pkg(pkgs['libraries']['r_pkg'])
             general_status = general_status + status
         except KeyError:
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
index d41cde7..abdc78d 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
@@ -45,34 +45,34 @@ def configure_notebook(keyfile, hoststring):
     templates_dir = '/root/templates/'
     files_dir = '/root/files/'
     scripts_dir = '/root/scripts/'
-    run('mkdir -p /tmp/{}/'.format(args.cluster_name))
-    put(templates_dir + 'sparkmagic_config_template.json', '/tmp/sparkmagic_config_template.json')
+    conn.run('mkdir -p /tmp/{}/'.format(args.cluster_name))
+    conn.put(templates_dir + 'sparkmagic_config_template.json', '/tmp/sparkmagic_config_template.json')
     #put(templates_dir + 'pyspark_dataengine_template.json', '/tmp/{}/pyspark_dataengine_template.json'.format(args.cluster_name))
     #put(templates_dir + 'r_dataengine_template.json', '/tmp/{}/r_dataengine_template.json'.format(args.cluster_name))
     #put(templates_dir + 'toree_dataengine_template.json','/tmp/{}/toree_dataengine_template.json'.format(args.cluster_name))
-    # put(files_dir + 'toree_kernel.tar.gz', '/tmp/{}/toree_kernel.tar.gz'.format(args.cluster_name))
-    # put(templates_dir + 'toree_dataengine_template.json', '/tmp/{}/toree_dataengine_template.json'.format(args.cluster_name))
-    # put(templates_dir + 'run_template.sh', '/tmp/{}/run_template.sh'.format(args.cluster_name))
-    put(templates_dir + 'notebook_spark-defaults_local.conf',
+    # conn.put(files_dir + 'toree_kernel.tar.gz', '/tmp/{}/toree_kernel.tar.gz'.format(args.cluster_name))
+    # conn.put(templates_dir + 'toree_dataengine_template.json', '/tmp/{}/toree_dataengine_template.json'.format(args.cluster_name))
+    # conn.put(templates_dir + 'run_template.sh', '/tmp/{}/run_template.sh'.format(args.cluster_name))
+    conn.put(templates_dir + 'notebook_spark-defaults_local.conf',
         '/tmp/{}/notebook_spark-defaults_local.conf'.format(args.cluster_name))
     spark_master_ip = args.spark_master.split('//')[1].split(':')[0]
     # spark_memory = get_spark_memory(True, args.os_user, spark_master_ip, keyfile)
-    # run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory, args.cluster_name))
+    # conn.run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory, args.cluster_name))
     if not exists('/usr/local/bin/jupyter_dataengine_create_configs.py'):
-        put(scripts_dir + 'jupyter_dataengine_create_configs.py', '/usr/local/bin/jupyter_dataengine_create_configs.py',
+        conn.put(scripts_dir + 'jupyter_dataengine_create_configs.py', '/usr/local/bin/jupyter_dataengine_create_configs.py',
             use_sudo=True)
-        sudo('chmod 755 /usr/local/bin/jupyter_dataengine_create_configs.py')
+        conn.sudo('chmod 755 /usr/local/bin/jupyter_dataengine_create_configs.py')
     if not exists('/usr/lib/python3.8/datalab/'):
-        sudo('mkdir -p /usr/lib/python3.8/datalab/')
-        put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
-        sudo('chmod a+x /usr/lib/python3.8/datalab/*')
+        conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+        conn.put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
+        conn.sudo('chmod a+x /usr/lib/python3.8/datalab/*')
         if exists('/usr/lib64'):
-            sudo('mkdir -p /usr/lib64/python3.8')
-            sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+            conn.sudo('mkdir -p /usr/lib64/python3.8')
+            conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 def create_inactivity_log(master_ip, hoststring):
     reworked_ip = master_ip.replace('.', '-')
-    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
+    conn.sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -88,7 +88,7 @@ if __name__ == "__main__":
         os.environ['spark_configurations'] = '[]'
     configure_notebook(args.keyfile, env.host_string)
     create_inactivity_log(args.spark_master_ip, env.host_string)
-    sudo('/usr/bin/python3 /usr/local/bin/jupyter_dataengine_create_configs.py '
+    conn.sudo('/usr/bin/python3 /usr/local/bin/jupyter_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} \
          --spark_master {} --datalake_enabled {} --r_enabled {} --spark_configurations "{}"'.
          format(args.cluster_name, args.spark_version, args.hadoop_version, args.os_user, args.spark_master,
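
Several of the kernel-installation hunks above keep Fabric 1 conveniences in conn.put(): a shell glob as the source and use_sudo=True. Fabric 2's Connection.put() copies a single local file and has no sudo option, so an equivalent would typically iterate over the files and finish with sudo. This is only a sketch under that assumption, reusing the library path from the scripts:

    import glob
    import os

    # Sketch: copy each datalab library file, then move it into place with sudo.
    for lib in glob.glob('/usr/lib/python3.8/datalab/*.py'):
        conn.put(lib, '/tmp/{}'.format(os.path.basename(lib)))
        conn.sudo('mv /tmp/{0} /usr/lib/python3.8/datalab/{0}'.format(os.path.basename(lib)))
    conn.sudo('chmod a+x /usr/lib/python3.8/datalab/*')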
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py b/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
index 0a63f88..47313bd 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
@@ -38,11 +38,11 @@ jupyterlab_dir = '/home/' + args.os_user + '/.jupyterlab/'
 def start_jupyterlab_container(jupyterlab_dir):
     try:
         with cd('{}'.format(jupyterlab_dir)):
-            run('docker build --network=host --file Dockerfile_jupyterlab -t jupyter-lab .'.format(args.os_user))
-            container_id = run('docker ps | awk \'NR==2{print $1}\'')
+            conn.run('docker build --network=host --file Dockerfile_jupyterlab -t jupyter-lab .'.format(args.os_user))
+            container_id = conn.run('docker ps | awk \'NR==2{print $1}\'')
             if container_id != '':
-                run('docker stop ' + container_id)
-            run('docker run -d --restart unless-stopped -p 8888:8888 \
+                conn.run('docker stop ' + container_id)
+            conn.run('docker run -d --restart unless-stopped -p 8888:8888 \
                      -v /home/{0}:/opt/legion/repository \
                      -v /home/{0}/.ssh/:/home/{0}/.ssh/ \
                      jupyter-lab:latest'.format(args.os_user))
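
The container-start hunk above still relies on the module-level cd() context manager and compares the raw return value of conn.run() with a string. Under Fabric 2 the usual equivalents are conn.cd() and Result.stdout; a small sketch under those assumptions (the directory is a placeholder):

    # Sketch: Fabric 2 equivalents for cd() and for testing captured output.
    with conn.cd('/home/datalab-user/.jupyterlab/'):
        conn.run('docker build --network=host --file Dockerfile_jupyterlab -t jupyter-lab .')
        container_id = conn.run("docker ps | awk 'NR==2{print $1}'").stdout.strip()
        if container_id:
            conn.run('docker stop ' + container_id)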
diff --git a/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py b/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
index a810588..6193da6 100644
--- a/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
+++ b/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
@@ -46,9 +46,9 @@ if __name__ == "__main__":
     git_creds = dict()
     try:
         if exists('/home/{}/.netrc'.format(args.os_user)):
-            run('rm .netrc')
+            conn.run('rm .netrc')
         if exists('/home/{}/.gitcreds'.format(args.os_user)):
-            run('rm .gitcreds')
+            conn.run('rm .gitcreds')
         git_creds = os.environ['git_creds']
     except KeyError as err:
         print('Error: {0}'.format(err))
@@ -72,14 +72,14 @@ if __name__ == "__main__":
         with open("new_netrc", "w+") as f:
             for conf in sorted(new_config, reverse=True):
                 f.writelines(conf + "\n")
-        put('new_netrc', '/home/{}/.netrc'.format(args.os_user))
+        conn.put('new_netrc', '/home/{}/.netrc'.format(args.os_user))
 
         creds = dict()
         with open("new_gitcreds", 'w') as gitcreds:
             for i in range(len(data)):
                 creds.update({data[i]['hostname']: [data[i]['username'], data[i]['email']]})
             gitcreds.write(json.dumps(creds))
-        put('new_gitcreds', '/home/{}/.gitcreds'.format(args.os_user))
+        conn.put('new_gitcreds', '/home/{}/.gitcreds'.format(args.os_user))
 
     except Exception as err:
         print('Error: {0}'.format(err))
diff --git a/infrastructure-provisioning/src/general/scripts/os/reconfigure_spark.py b/infrastructure-provisioning/src/general/scripts/os/reconfigure_spark.py
index c99da47..3897b7d 100644
--- a/infrastructure-provisioning/src/general/scripts/os/reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/reconfigure_spark.py
@@ -51,12 +51,12 @@ if __name__ == "__main__":
         configure_local_spark(jars_dir, templates_dir, memory_type)
     elif args.spark_type == 'dataengine':
         if not exists('/usr/local/bin/notebook_reconfigure_dataengine_spark.py'):
-            put('/root/scripts/notebook_reconfigure_dataengine_spark.py',
+            conn.put('/root/scripts/notebook_reconfigure_dataengine_spark.py',
                 '/tmp/notebook_reconfigure_dataengine_spark.py')
-            sudo('mv /tmp/notebook_reconfigure_dataengine_spark.py '
+            conn.sudo('mv /tmp/notebook_reconfigure_dataengine_spark.py '
                  '/usr/local/bin/notebook_reconfigure_dataengine_spark.py')
-        sudo('mkdir -p /tmp/{}'.format(args.cluster_name))
-        put('{}notebook_spark-defaults_local.conf'.format(templates_dir),
+        conn.sudo('mkdir -p /tmp/{}'.format(args.cluster_name))
+        conn.put('{}notebook_spark-defaults_local.conf'.format(templates_dir),
             '/tmp/{}/notebook_spark-defaults_local.conf'.format(args.cluster_name), use_sudo=True)
         cluster_dir = '/opt/' + args.cluster_name + '/'
         if 'azure_datalake_enable' in os.environ:
@@ -65,7 +65,7 @@ if __name__ == "__main__":
             datalake_enabled = 'false'
         if 'spark_configurations' not in os.environ:
             os.environ['spark_configurations'] = '[]'
-        sudo('/usr/bin/python3 /usr/local/bin/notebook_reconfigure_dataengine_spark.py --cluster_name {0} '
+        conn.sudo('/usr/bin/python3 /usr/local/bin/notebook_reconfigure_dataengine_spark.py --cluster_name {0} '
              '--jars_dir {1} --cluster_dir {2} --datalake_enabled {3} --spark_configurations "{4}"'.format(
               args.cluster_name, jars_dir, cluster_dir, datalake_enabled, os.environ['spark_configurations']))
     datalab.fab.close_connection()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
index 18c6313..a4a0b19 100644
--- a/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
@@ -44,28 +44,28 @@ args = parser.parse_args()
 def configure_notebook(keyfile, hoststring):
     scripts_dir = '/root/scripts/'
     templates_dir = '/root/templates/'
-    run('mkdir -p /tmp/{}/'.format(args.cluster_name))
-    put(templates_dir + 'notebook_spark-defaults_local.conf',
+    conn.run('mkdir -p /tmp/{}/'.format(args.cluster_name))
+    conn.put(templates_dir + 'notebook_spark-defaults_local.conf',
         '/tmp/{}/notebook_spark-defaults_local.conf'.format(args.cluster_name))
     spark_master_ip = args.spark_master.split('//')[1].split(':')[0]
     spark_memory = get_spark_memory(True, args.os_user, spark_master_ip, keyfile)
-    run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory,
+    conn.run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory,
                                                                                                   args.cluster_name))
     if not exists('/usr/local/bin/rstudio_dataengine_create_configs.py'):
-        put(scripts_dir + 'rstudio_dataengine_create_configs.py', '/usr/local/bin/rstudio_dataengine_create_configs.py',
+        conn.put(scripts_dir + 'rstudio_dataengine_create_configs.py', '/usr/local/bin/rstudio_dataengine_create_configs.py',
             use_sudo=True)
-        sudo('chmod 755 /usr/local/bin/rstudio_dataengine_create_configs.py')
+        conn.sudo('chmod 755 /usr/local/bin/rstudio_dataengine_create_configs.py')
     if not exists('/usr/lib/python3.8/datalab/'):
-        sudo('mkdir -p /usr/lib/python3.8/datalab/')
-        put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
-        sudo('chmod a+x /usr/lib/python3.8/datalab/*')
+        conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+        conn.put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
+        conn.sudo('chmod a+x /usr/lib/python3.8/datalab/*')
         if exists('/usr/lib64'):
-            sudo('mkdir -p /usr/lib64/python3.8')
-            sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+            conn.sudo('mkdir -p /usr/lib64/python3.8')
+            conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 def create_inactivity_log(master_ip, hoststring):
     reworked_ip = master_ip.replace('.', '-')
-    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
+    conn.sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -80,7 +80,7 @@ if __name__ == "__main__":
         os.environ['spark_configurations'] = '[]'
     configure_notebook(args.keyfile, env.host_string)
     create_inactivity_log(args.spark_master_ip, env.host_string)
-    sudo('/usr/bin/python3 /usr/local/bin/rstudio_dataengine_create_configs.py '
+    conn.sudo('/usr/bin/python3 /usr/local/bin/rstudio_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
          '--datalake_enabled {} --spark_configurations "{}"'.
          format(args.cluster_name, args.spark_version, args.hadoop_version, args.os_user, args.spark_master, region,
diff --git a/infrastructure-provisioning/src/general/scripts/os/superset_start.py b/infrastructure-provisioning/src/general/scripts/os/superset_start.py
index c3f344b..afe38ae 100644
--- a/infrastructure-provisioning/src/general/scripts/os/superset_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/superset_start.py
@@ -38,13 +38,13 @@ superset_dir = '/home/' + args.os_user + '/incubator-superset/contrib/docker'
 def start_superset(superset_dir):
     try:
         with cd('{}'.format(superset_dir)):
-            sudo('docker-compose run --rm superset ./docker-init.sh')
-        sudo('cp /opt/datalab/templates/superset-notebook.service /tmp/')
-        sudo('sed -i \'s/OS_USER/{}/g\' /tmp/superset-notebook.service'.format(args.os_user))
-        sudo('cp /tmp/superset-notebook.service /etc/systemd/system/')
-        sudo('systemctl daemon-reload')
-        sudo('systemctl enable superset-notebook')
-        sudo('systemctl start superset-notebook')
+            conn.sudo('docker-compose run --rm superset ./docker-init.sh')
+        conn.sudo('cp /opt/datalab/templates/superset-notebook.service /tmp/')
+        conn.sudo('sed -i \'s/OS_USER/{}/g\' /tmp/superset-notebook.service'.format(args.os_user))
+        conn.sudo('cp /tmp/superset-notebook.service /etc/systemd/system/')
+        conn.sudo('systemctl daemon-reload')
+        conn.sudo('systemctl enable superset-notebook')
+        conn.sudo('systemctl start superset-notebook')
     except: sys.exit(1)
 
 if __name__ == "__main__":
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
index 3766073..36d358e 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
@@ -44,28 +44,28 @@ args = parser.parse_args()
 def configure_notebook(keyfile, hoststring):
     scripts_dir = '/root/scripts/'
     templates_dir = '/root/templates/'
-    run('mkdir -p /tmp/{}/'.format(args.cluster_name))
-    put(templates_dir + 'notebook_spark-defaults_local.conf',
+    conn.run('mkdir -p /tmp/{}/'.format(args.cluster_name))
+    conn.put(templates_dir + 'notebook_spark-defaults_local.conf',
         '/tmp/{}/notebook_spark-defaults_local.conf'.format(args.cluster_name))
     spark_master_ip = args.spark_master.split('//')[1].split(':')[0]
     spark_memory = get_spark_memory(True, args.os_user, spark_master_ip, keyfile)
-    run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory,
+    conn.run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory,
                                                                                                   args.cluster_name))
     if not exists('/usr/local/bin/tensor-rstudio_dataengine_create_configs.py'):
-        put(scripts_dir + 'tensor-rstudio_dataengine_create_configs.py',
+        conn.put(scripts_dir + 'tensor-rstudio_dataengine_create_configs.py',
             '/usr/local/bin/tensor-rstudio_dataengine_create_configs.py', use_sudo=True)
-        sudo('chmod 755 /usr/local/bin/tensor-rstudio_dataengine_create_configs.py')
+        conn.sudo('chmod 755 /usr/local/bin/tensor-rstudio_dataengine_create_configs.py')
     if not exists('/usr/lib/python3.8/datalab/'):
-        sudo('mkdir -p /usr/lib/python3.8/datalab/')
-        put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
-        sudo('chmod a+x /usr/lib/python3.8/datalab/*')
+        conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+        conn.put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
+        conn.sudo('chmod a+x /usr/lib/python3.8/datalab/*')
         if exists('/usr/lib64'):
-            sudo('mkdir -p /usr/lib64/python3.8')
-            sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+            conn.sudo('mkdir -p /usr/lib64/python3.8')
+            conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 def create_inactivity_log(master_ip, hoststring):
     reworked_ip = master_ip.replace('.', '-')
-    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
+    conn.sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -80,7 +80,7 @@ if __name__ == "__main__":
         os.environ['spark_configurations'] = '[]'
     configure_notebook(args.keyfile, env.host_string)
     create_inactivity_log(args.spark_master_ip, env.host_string)
-    sudo('/usr/bin/python3 /usr/local/bin/tensor-rstudio_dataengine_create_configs.py '
+    conn.sudo('/usr/bin/python3 /usr/local/bin/tensor-rstudio_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
          '--datalake_enabled {} --spark_configurations "{}"'.
          format(args.cluster_name, args.spark_version, args.hadoop_version, args.os_user, args.spark_master, region,
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
index c93a220..0e7b7b0 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
@@ -44,28 +44,28 @@ args = parser.parse_args()
 def configure_notebook(keyfile, hoststring):
     templates_dir = '/root/templates/'
     scripts_dir = '/root/scripts/'
-    run('mkdir -p /tmp/{}/'.format(args.cluster_name))
-    put(templates_dir + 'sparkmagic_config_template.json', '/tmp/sparkmagic_config_template.json')
-    # put(templates_dir + 'pyspark_dataengine_template.json', '/tmp/{}/pyspark_dataengine_template.json'.format(args.cluster_name))
-    # put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/{}/notebook_spark-defaults_local.conf'.format(args.cluster_name))
+    conn.run('mkdir -p /tmp/{}/'.format(args.cluster_name))
+    conn.put(templates_dir + 'sparkmagic_config_template.json', '/tmp/sparkmagic_config_template.json')
+    # conn.put(templates_dir + 'pyspark_dataengine_template.json', '/tmp/{}/pyspark_dataengine_template.json'.format(args.cluster_name))
+    # conn.put(templates_dir + 'notebook_spark-defaults_local.conf', '/tmp/{}/notebook_spark-defaults_local.conf'.format(args.cluster_name))
     spark_master_ip = args.spark_master.split('//')[1].split(':')[0]
     # spark_memory = get_spark_memory(True, args.os_user, spark_master_ip, keyfile)
-    # run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory, args.cluster_name))
+    # conn.run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory, args.cluster_name))
     if not exists('/usr/local/bin/tensor_dataengine_create_configs.py'):
-        put(scripts_dir + 'tensor_dataengine_create_configs.py', '/usr/local/bin/tensor_dataengine_create_configs.py',
+        conn.put(scripts_dir + 'tensor_dataengine_create_configs.py', '/usr/local/bin/tensor_dataengine_create_configs.py',
             use_sudo=True)
-        sudo('chmod 755 /usr/local/bin/tensor_dataengine_create_configs.py')
+        conn.sudo('chmod 755 /usr/local/bin/tensor_dataengine_create_configs.py')
     if not exists('/usr/lib/python3.8/datalab/'):
-        sudo('mkdir -p /usr/lib/python3.8/datalab/')
-        put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
-        sudo('chmod a+x /usr/lib/python3.8/datalab/*')
+        conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+        conn.put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
+        conn.sudo('chmod a+x /usr/lib/python3.8/datalab/*')
         if exists('/usr/lib64'):
-            sudo('mkdir -p /usr/lib64/python3.8')
-            sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+            conn.sudo('mkdir -p /usr/lib64/python3.8')
+            conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 def create_inactivity_log(master_ip, hoststring):
     reworked_ip = master_ip.replace('.', '-')
-    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
+    conn.sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -80,7 +80,7 @@ if __name__ == "__main__":
         os.environ['spark_configurations'] = '[]'
     configure_notebook(args.keyfile, env.host_string)
     create_inactivity_log(args.spark_master_ip, env.host_string)
-    sudo('/usr/bin/python3 /usr/local/bin/tensor_dataengine_create_configs.py '
+    conn.sudo('/usr/bin/python3 /usr/local/bin/tensor_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
          '--datalake_enabled {} --spark_configurations "{}"'.
          format(args.cluster_name, args.spark_version, args.hadoop_version, args.os_user, args.spark_master, region,
diff --git a/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py b/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py
index 0609a5d..3691ba8 100644
--- a/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py
@@ -43,6 +43,6 @@ if __name__ == "__main__":
     else:
         kernel = args.cluster_ip.replace('.', '-')
 
-    sudo("date +%s > /opt/inactivity/{}_inactivity".format(kernel))
+    conn.sudo("date +%s > /opt/inactivity/{}_inactivity".format(kernel))
 
     datalab.fab.close_connection()
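
Note: this script calls conn.sudo() and datalab.fab.close_connection() without opening a connection itself, so it appears to rely on a module-level conn created elsewhere (configure_gitlab.py below calls datalab.fab.init_datalab_connection()). A sketch of what such a helper might look like with a Fabric 2 Connection; the real implementation lives in datalab.fab and may differ:

    from fabric import Connection

    conn = None

    def init_datalab_connection(host, user, keyfile):
        # Open one SSH connection and expose it module-wide for conn.run/sudo/put.
        global conn
        conn = Connection(host=host, user=user,
                          connect_kwargs={'key_filename': keyfile})
        return conn

    def close_connection():
        if conn is not None:
            conn.close()
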
diff --git a/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
index 6d0b0f9..1f3f448 100644
--- a/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
@@ -44,34 +44,34 @@ args = parser.parse_args()
 def configure_notebook(keyfile, hoststring):
     templates_dir = '/root/templates/'
     scripts_dir = '/root/scripts/'
-    run('mkdir -p /tmp/{}/'.format(args.cluster_name))
+    conn.run('mkdir -p /tmp/{}/'.format(args.cluster_name))
     if os.environ['notebook_multiple_clusters'] == 'true':
-        put(templates_dir + 'dataengine_interpreter_livy.json', '/tmp/{}/dataengine_interpreter.json'.format(args.cluster_name))
+        conn.put(templates_dir + 'dataengine_interpreter_livy.json', '/tmp/{}/dataengine_interpreter.json'.format(args.cluster_name))
     else:
-        put(templates_dir + 'dataengine_interpreter_spark.json',
+        conn.put(templates_dir + 'dataengine_interpreter_spark.json',
             '/tmp/{}/dataengine_interpreter.json'.format(args.cluster_name))
-    put(templates_dir + 'notebook_spark-defaults_local.conf',
+    conn.put(templates_dir + 'notebook_spark-defaults_local.conf',
         '/tmp/{}/notebook_spark-defaults_local.conf'.format(args.cluster_name))
     spark_master_ip = args.spark_master.split('//')[1].split(':')[0]
     spark_memory = get_spark_memory(True, args.os_user, spark_master_ip, keyfile)
-    run('sed -i "s|EXECUTOR_MEMORY|{}m|g " /tmp/{}/dataengine_interpreter.json'.format(spark_memory, args.cluster_name))
-    run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory,
+    conn.run('sed -i "s|EXECUTOR_MEMORY|{}m|g " /tmp/{}/dataengine_interpreter.json'.format(spark_memory, args.cluster_name))
+    conn.run('echo "spark.executor.memory {0}m" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(spark_memory,
                                                                                                   args.cluster_name))
     if not exists('/usr/local/bin/zeppelin_dataengine_create_configs.py'):
-        put(scripts_dir + 'zeppelin_dataengine_create_configs.py',
+        conn.put(scripts_dir + 'zeppelin_dataengine_create_configs.py',
             '/usr/local/bin/zeppelin_dataengine_create_configs.py', use_sudo=True)
-        sudo('chmod 755 /usr/local/bin/zeppelin_dataengine_create_configs.py')
+        conn.sudo('chmod 755 /usr/local/bin/zeppelin_dataengine_create_configs.py')
     if not exists('/usr/lib/python3.8/datalab/'):
-        sudo('mkdir -p /usr/lib/python3.8/datalab/')
-        put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
-        sudo('chmod a+x /usr/lib/python3.8/datalab/*')
+        conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+        conn.put('/usr/lib/python3.8/datalab/*', '/usr/lib/python3.8/datalab/', use_sudo=True)
+        conn.sudo('chmod a+x /usr/lib/python3.8/datalab/*')
         if exists('/usr/lib64'):
-            sudo('mkdir -p /usr/lib64/python3.8')
-            sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+            conn.sudo('mkdir -p /usr/lib64/python3.8')
+            conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
 
 def create_inactivity_log(master_ip, hoststring):
     reworked_ip = master_ip.replace('.', '-')
-    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
+    conn.sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -88,7 +88,7 @@ if __name__ == "__main__":
     create_inactivity_log(args.spark_master_ip, env.host_string)
     livy_version = os.environ['notebook_livy_version']
     r_enabled = os.environ['notebook_r_enabled']
-    sudo('/usr/bin/python3 /usr/local/bin/zeppelin_dataengine_create_configs.py '
+    conn.sudo('/usr/bin/python3 /usr/local/bin/zeppelin_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --keyfile {} \
          --notebook_ip {} --livy_version {} --multiple_clusters {} --region {} --datalake_enabled {} '
          '--r_enabled {} --spark_configurations "{}"'.
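
Note: env.hosts and env.host_string in the __main__ blocks of these kernel-install scripts are Fabric 1 globals that this patch leaves untouched. With Fabric 2 the host normally goes straight into the Connection, roughly as in this sketch (argument names taken from the scripts' argparse options):

    from fabric import Connection

    conn = Connection(host=args.notebook_ip, user=args.os_user,
                      connect_kwargs={'key_filename': args.keyfile})
    configure_notebook(args.keyfile, args.notebook_ip)
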
diff --git a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
index 280dbf8..954a6ec 100644
--- a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
+++ b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
@@ -78,7 +78,7 @@ if __name__ == "__main__":
     print("Prepare .ensure directory")
     try:
         if not exists('/home/' + args.os_user + '/.ensure_dir'):
-            sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
+            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
     print("Mount additional volume")
@@ -102,7 +102,7 @@ if __name__ == "__main__":
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
     print("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path)
-    local_spark_scala_version = sudo('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"')
+    local_spark_scala_version = conn.sudo('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"')
     print("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
     print("Configure local Spark")
diff --git a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
index 5eab9ab..4b6e8ec 100644
--- a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
+++ b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
@@ -86,7 +86,7 @@ if __name__ == "__main__":
     print("Prepare .ensure directory")
     try:
         if not exists('/home/' + args.os_user + '/.ensure_dir'):
-            sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
+            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
     print("Mount additional volume")
diff --git a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
index 42e99a7..35c09b66 100644
--- a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
+++ b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
@@ -72,7 +72,7 @@ if __name__ == "__main__":
     print("Prepare .ensure directory")
     try:
         if not exists('/home/' + args.os_user + '/.ensure_dir'):
-            sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
+            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
     print("Mount additional volume")
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
index 667f727..0d21450 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
@@ -54,7 +54,7 @@ def modify_conf_file(args):
     local('scp -r -i {} /project_tree/* {}:{}sources/'.format(args.keyfile, env.host_string, args.datalab_path))
     local('scp -i {} /root/scripts/configure_conf_file.py {}:/tmp/configure_conf_file.py'.format(args.keyfile,
                                                                                                  env.host_string))
-    sudo("python3 /tmp/configure_conf_file.py --datalab_dir {} --variables_list '{}'".format(
+    conn.sudo("python3 /tmp/configure_conf_file.py --datalab_dir {} --variables_list '{}'".format(
         args.datalab_path, json.dumps(variables_list)))
 
 
@@ -63,10 +63,10 @@ def download_toree():
     tarball_link = 'https://archive.apache.org/dist/incubator/toree/0.3.0-incubating/toree/toree-0.3.0-incubating-bin.tar.gz'
     jar_link = 'https://repo1.maven.org/maven2/org/apache/toree/toree-assembly/0.3.0-incubating/toree-assembly-0.3.0-incubating.jar'
     try:
-        run('wget {}'.format(tarball_link))
-        run('wget {}'.format(jar_link))
-        run('mv toree-0.3.0-incubating-bin.tar.gz {}toree_kernel.tar.gz'.format(toree_path))
-        run('mv toree-assembly-0.3.0-incubating.jar {}toree-assembly-0.3.0.jar'.format(toree_path))
+        conn.run('wget {}'.format(tarball_link))
+        conn.run('wget {}'.format(jar_link))
+        conn.run('mv toree-0.3.0-incubating-bin.tar.gz {}toree_kernel.tar.gz'.format(toree_path))
+        conn.run('mv toree-assembly-0.3.0-incubating.jar {}toree-assembly-0.3.0.jar'.format(toree_path))
     except Exception as err:
         traceback.print_exc()
         print('Failed to download toree: ', str(err))
@@ -75,22 +75,22 @@ def download_toree():
 
 def add_china_repository(datalab_path):
     with cd('{}sources/infrastructure-provisioning/src/base/'.format(datalab_path)):
-        sudo('sed -i "/pip install/s/$/ -i https\:\/\/{0}\/simple --trusted-host {0} --timeout 60000/g" '
+        conn.sudo('sed -i "/pip install/s/$/ -i https\:\/\/{0}\/simple --trusted-host {0} --timeout 60000/g" '
              'Dockerfile'.format(os.environ['conf_pypi_mirror']))
-        sudo('sed -i "/pip install/s/jupyter/ipython==5.0.0 jupyter==1.0.0/g" Dockerfile')
-        sudo('sed -i "22i COPY general/files/os/debian/sources.list /etc/apt/sources.list" Dockerfile')
+        conn.sudo('sed -i "/pip install/s/jupyter/ipython==5.0.0 jupyter==1.0.0/g" Dockerfile')
+        conn.sudo('sed -i "22i COPY general/files/os/debian/sources.list /etc/apt/sources.list" Dockerfile')
 
 def login_in_gcr(os_user, gcr_creds, odahu_image, datalab_path, cloud_provider):
     if gcr_creds != '':
         try:
             if os.environ['conf_cloud_provider'] != 'gcp':
                 try:
-                    sudo('echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt '
+                    conn.sudo('echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt '
                           'cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list')
-                    sudo('apt-get -y install apt-transport-https ca-certificates gnupg')
-                    sudo('curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add -')
-                    sudo('apt-get update')
-                    sudo('apt-get -y install google-cloud-sdk')
+                    conn.sudo('apt-get -y install apt-transport-https ca-certificates gnupg')
+                    conn.sudo('curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add -')
+                    conn.sudo('apt-get update')
+                    conn.sudo('apt-get -y install google-cloud-sdk')
                 except Exception as err:
                     traceback.print_exc()
                     print('Failed to install gcloud: ', str(err))
@@ -99,9 +99,9 @@ def login_in_gcr(os_user, gcr_creds, odahu_image, datalab_path, cloud_provider):
                 with open('/tmp/config', 'w') as f:
                     f.write(base64.b64decode(gcr_creds))
                 local('scp -i {} /tmp/config {}:/tmp/config'.format(args.keyfile, env.host_string, os_user))
-                sudo('mkdir /home/{}/.docker'.format(os_user))
-                sudo('cp /tmp/config /home/{}/.docker/config.json'.format(os_user))
-                sudo('sed -i "s|ODAHU_IMAGE|{}|" {}sources/infrastructure-provisioning/src/general/files/{}/odahu_Dockerfile'
+                conn.sudo('mkdir /home/{}/.docker'.format(os_user))
+                conn.sudo('cp /tmp/config /home/{}/.docker/config.json'.format(os_user))
+                conn.sudo('sed -i "s|ODAHU_IMAGE|{}|" {}sources/infrastructure-provisioning/src/general/files/{}/odahu_Dockerfile'
                      .format(odahu_image, datalab_path, cloud_provider))
             except Exception as err:
                 traceback.print_exc()
@@ -117,27 +117,27 @@ def build_docker_images(image_list, region, datalab_path):
         if os.environ['conf_cloud_provider'] == 'azure':
             local('scp -i {} /root/azure_auth.json {}:{}sources/infrastructure-provisioning/src/base/'
                   'azure_auth.json'.format(args.keyfile, env.host_string, args.datalab_path))
-            sudo('cp {0}sources/infrastructure-provisioning/src/base/azure_auth.json '
+            conn.sudo('cp {0}sources/infrastructure-provisioning/src/base/azure_auth.json '
                  '/home/{1}/keys/azure_auth.json'.format(args.datalab_path, args.os_user))
         if region == 'cn-north-1':
             add_china_repository(datalab_path)
         for image in image_list:
             name = image['name']
             tag = image['tag']
-            sudo('cd {0}sources/infrastructure-provisioning/src/; cp general/files/{1}/{2}_description.json '
+            conn.sudo('cd {0}sources/infrastructure-provisioning/src/; cp general/files/{1}/{2}_description.json '
                  '{2}/description.json'.format(args.datalab_path, args.cloud_provider, name))
             if name == 'base':
-                sudo("cd {4}sources/infrastructure-provisioning/src/; docker build --build-arg OS={2} "
+                conn.sudo("cd {4}sources/infrastructure-provisioning/src/; docker build --build-arg OS={2} "
                      "--build-arg SRC_PATH="" --file general/files/{3}/{0}_Dockerfile "
                      "-t docker.datalab-{0}:{1} .".format(name, tag, args.os_family, args.cloud_provider,
                                                           args.datalab_path))
             else:
-                sudo("cd {4}sources/infrastructure-provisioning/src/; docker build --build-arg OS={2} "
+                conn.sudo("cd {4}sources/infrastructure-provisioning/src/; docker build --build-arg OS={2} "
                      "--file general/files/{3}/{0}_Dockerfile -t docker.datalab-{0}:{1} .".format(name, tag,
                                                                                                   args.os_family,
                                                                                                   args.cloud_provider,
                                                                                                   args.datalab_path))
-        sudo('rm -f {}sources/infrastructure-provisioning/src/base/azure_auth.json'.format(args.datalab_path))
+        conn.sudo('rm -f {}sources/infrastructure-provisioning/src/base/azure_auth.json'.format(args.datalab_path))
         return True
     except:
         return False
@@ -146,36 +146,36 @@ def build_docker_images(image_list, region, datalab_path):
 def configure_guacamole():
     try:
         mysql_pass = id_generator()
-        sudo('docker run --name guacd --restart unless-stopped -d -p 4822:4822 guacamole/guacd')
-        sudo('docker run --rm guacamole/guacamole /opt/guacamole/bin/initdb.sh --mysql > initdb.sql')
-        sudo('mkdir /tmp/scripts')
-        sudo('cp initdb.sql /tmp/scripts')
-        sudo('mkdir /opt/mysql')
-        sudo('docker run --name guac-mysql --restart unless-stopped -v /tmp/scripts:/tmp/scripts '\
+        conn.sudo('docker run --name guacd --restart unless-stopped -d -p 4822:4822 guacamole/guacd')
+        conn.sudo('docker run --rm guacamole/guacamole /opt/guacamole/bin/initdb.sh --mysql > initdb.sql')
+        conn.sudo('mkdir /tmp/scripts')
+        conn.sudo('cp initdb.sql /tmp/scripts')
+        conn.sudo('mkdir /opt/mysql')
+        conn.sudo('docker run --name guac-mysql --restart unless-stopped -v /tmp/scripts:/tmp/scripts '\
              ' -v /opt/mysql:/var/lib/mysql -e MYSQL_ROOT_PASSWORD={} -d mysql:latest'.format(mysql_pass))
         time.sleep(180)
-        sudo('touch /opt/mysql/dock-query.sql')
-        sudo("""echo "CREATE DATABASE guacamole; CREATE USER 'guacamole' IDENTIFIED BY '{}';""" \
+        conn.sudo('touch /opt/mysql/dock-query.sql')
+        conn.sudo("""echo "CREATE DATABASE guacamole; CREATE USER 'guacamole' IDENTIFIED BY '{}';""" \
              """ GRANT SELECT,INSERT,UPDATE,DELETE ON guacamole.* TO 'guacamole';" > /opt/mysql/dock-query.sql""" \
              .format(mysql_pass))
-        sudo('docker exec -i guac-mysql /bin/bash -c "mysql -u root -p{} < /var/lib/mysql/dock-query.sql"' \
+        conn.sudo('docker exec -i guac-mysql /bin/bash -c "mysql -u root -p{} < /var/lib/mysql/dock-query.sql"' \
              .format(mysql_pass))
-        sudo('docker exec -i guac-mysql /bin/bash -c "cat /tmp/scripts/initdb.sql | mysql -u root -p{} guacamole"' \
+        conn.sudo('docker exec -i guac-mysql /bin/bash -c "cat /tmp/scripts/initdb.sql | mysql -u root -p{} guacamole"' \
              .format(mysql_pass))
-        sudo("docker run --name guacamole --restart unless-stopped --link guacd:guacd --link guac-mysql:mysql" \
+        conn.sudo("docker run --name guacamole --restart unless-stopped --link guacd:guacd --link guac-mysql:mysql" \
              " -e MYSQL_DATABASE='guacamole' -e MYSQL_USER='guacamole' -e MYSQL_PASSWORD='{}'" \
              " -d -p 8080:8080 guacamole/guacamole".format(mysql_pass))
         # create cronjob for run containers on reboot
-        sudo('mkdir /opt/datalab/cron')
-        sudo('touch /opt/datalab/cron/mysql.sh')
-        sudo('chmod 755 /opt/datalab/cron/mysql.sh')
-        sudo('echo "docker start guacd" >> /opt/datalab/cron/mysql.sh')
-        sudo('echo "docker start guac-mysql" >> /opt/datalab/cron/mysql.sh')
-        sudo('echo "docker rm guacamole" >> /opt/datalab/cron/mysql.sh')
-        sudo("""echo "docker run --name guacamole --restart unless-stopped --link guacd:guacd --link guac-mysql:mysql""" \
+        conn.sudo('mkdir /opt/datalab/cron')
+        conn.sudo('touch /opt/datalab/cron/mysql.sh')
+        conn.sudo('chmod 755 /opt/datalab/cron/mysql.sh')
+        conn.sudo('echo "docker start guacd" >> /opt/datalab/cron/mysql.sh')
+        conn.sudo('echo "docker start guac-mysql" >> /opt/datalab/cron/mysql.sh')
+        conn.sudo('echo "docker rm guacamole" >> /opt/datalab/cron/mysql.sh')
+        conn.sudo("""echo "docker run --name guacamole --restart unless-stopped --link guacd:guacd --link guac-mysql:mysql""" \
              """ -e MYSQL_DATABASE='guacamole' -e MYSQL_USER='guacamole' -e MYSQL_PASSWORD='{}' -d""" \
              """ -p 8080:8080 guacamole/guacamole" >> /opt/datalab/cron/mysql.sh""".format(mysql_pass))
-        sudo('(crontab -l 2>/dev/null; echo "@reboot sh /opt/datalab/cron/mysql.sh") | crontab -')
+        conn.sudo('(crontab -l 2>/dev/null; echo "@reboot sh /opt/datalab/cron/mysql.sh") | crontab -')
         return True
     except Exception as err:
         traceback.print_exc()
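
Note: appends such as conn.sudo('echo "docker start guacd" >> /opt/datalab/cron/mysql.sh') rely on the >> redirection running with root rights; depending on how sudo wraps the command, the redirection can instead be handled by the unprivileged calling shell. Other parts of this patch already wrap such cases in bash -c (see configure_ssn_node.py below); a sketch of the same pattern applied here:

    # Run the whole echo + append inside a root shell so the write into the
    # root-owned cron script succeeds.
    conn.sudo('bash -c \'echo "docker start guacd" >> /opt/datalab/cron/mysql.sh\'')
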
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
index 2310c8a..200f4cd 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
@@ -48,15 +48,15 @@ def create_user(os_user):
     datalab.fab.init_datalab_connection(args.instance_ip, initial_user, args.keyfile)
 
     try:
-        sudo('useradd -m -G {1} -s /bin/bash {0}'.format(os_user, sudo_group))
-        sudo('echo "{} ALL = NOPASSWD:ALL" >> /etc/sudoers'.format(os_user))
-        sudo('mkdir /home/{}/.ssh'.format(os_user))
-        sudo('chown -R {0}:{0} /home/{1}/.ssh/'.format(initial_user, os_user))
-        sudo('cat /home/{0}/.ssh/authorized_keys > /home/{1}/.ssh/authorized_keys'.format(initial_user, os_user))
-        sudo('chown -R {0}:{0} /home/{0}/.ssh/'.format(os_user))
-        sudo('chmod 700 /home/{0}/.ssh'.format(os_user))
-        sudo('chmod 600 /home/{0}/.ssh/authorized_keys'.format(os_user))
-        sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
+        conn.sudo('useradd -m -G {1} -s /bin/bash {0}'.format(os_user, sudo_group))
+        conn.sudo('echo "{} ALL = NOPASSWD:ALL" >> /etc/sudoers'.format(os_user))
+        conn.sudo('mkdir /home/{}/.ssh'.format(os_user))
+        conn.sudo('chown -R {0}:{0} /home/{1}/.ssh/'.format(initial_user, os_user))
+        conn.sudo('cat /home/{0}/.ssh/authorized_keys > /home/{1}/.ssh/authorized_keys'.format(initial_user, os_user))
+        conn.sudo('chown -R {0}:{0} /home/{0}/.ssh/'.format(os_user))
+        conn.sudo('chmod 700 /home/{0}/.ssh'.format(os_user))
+        conn.sudo('chmod 600 /home/{0}/.ssh/authorized_keys'.format(os_user))
+        conn.sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
     except Exception as err:
         print('Failed to install gitlab.{}'.format(str(err)))
         sys.exit(1)
@@ -105,32 +105,32 @@ def install_gitlab():
     try:
         print('Installing gitlab...')
         if os.environ['conf_os_family'] == 'debian':
-            sudo('curl -sS https://packages.gitlab.com/install/repositories/gitlab/gitlab-ce/script.deb.sh | sudo bash')
-            sudo('apt install gitlab-ce -y')
+            conn.sudo('curl -sS https://packages.gitlab.com/install/repositories/gitlab/gitlab-ce/script.deb.sh | sudo bash')
+            conn.sudo('apt install gitlab-ce -y')
         elif os.environ['conf_os_family'] == 'redhat':
-            sudo('curl -sS https://packages.gitlab.com/install/repositories/gitlab/gitlab-ce/script.rpm.sh | sudo bash')
-            sudo('yum install gitlab-ce -y')
+            conn.sudo('curl -sS https://packages.gitlab.com/install/repositories/gitlab/gitlab-ce/script.rpm.sh | sudo bash')
+            conn.sudo('yum install gitlab-ce -y')
         else:
             print('Failed to install gitlab.')
             raise Exception
 
         with lcd('{}tmp/gitlab'.format(os.environ['conf_datalab_path'])):
-            put('gitlab.rb', '/tmp/gitlab.rb')
+            conn.put('gitlab.rb', '/tmp/gitlab.rb')
             local('rm gitlab.rb')
-        sudo('rm /etc/gitlab/gitlab.rb')
-        sudo('mv /tmp/gitlab.rb /etc/gitlab/gitlab.rb')
+        conn.sudo('rm /etc/gitlab/gitlab.rb')
+        conn.sudo('mv /tmp/gitlab.rb /etc/gitlab/gitlab.rb')
 
         if json.loads(os.environ['gitlab_ssl_enabled']):
-            sudo('mkdir -p /etc/gitlab/ssl')
-            sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout {0} \
+            conn.sudo('mkdir -p /etc/gitlab/ssl')
+            conn.sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout {0} \
                     -out {1} -subj "/C=US/ST=US/L=US/O=datalab/CN={2}"'.format(os.environ['gitlab_ssl_certificate_key'],
                                                                                os.environ['gitlab_ssl_certificate'],
                                                                                os.environ['instance_hostname']))
-            sudo('openssl dhparam -out {} 2048'.format(os.environ['gitlab_ssl_dhparams']))
+            conn.sudo('openssl dhparam -out {} 2048'.format(os.environ['gitlab_ssl_dhparams']))
             get('{}'.format(os.environ['gitlab_ssl_certificate']),
                 '{}tmp/gitlab'.format(os.environ['conf_datalab_path']))
 
-        sudo('gitlab-ctl reconfigure')
+        conn.sudo('gitlab-ctl reconfigure')
     except Exception as err:
         print('Failed to install gitlab.{}'.format(str(err)))
         sys.exit(1)
@@ -145,16 +145,16 @@ def configure_gitlab():
             proto = 'http'
 
         with settings(hide('everything')):
-            raw = run('curl -k --request POST "{0}://localhost/api/v4/session?login=root&password={1}"'
+            raw = conn.run('curl -k --request POST "{0}://localhost/api/v4/session?login=root&password={1}"'
                     .format(proto, os.environ['gitlab_root_password']))
             data = json.loads(raw)
             if not json.loads(os.environ['gitlab_signup_enabled']):
                 print('Disabling signup...')
-                run('curl -k --request PUT "{0}://localhost/api/v4/application/settings?private_token={1}&sudo=root&signup_enabled=false"'
+                conn.run('curl -k --request PUT "{0}://localhost/api/v4/application/settings?private_token={1}&sudo=root&signup_enabled=false"'
                     .format(proto, data['private_token']))
             if not json.loads(os.environ['gitlab_public_repos']):
                 print('Disabling public repos...')
-                run('curl -k --request PUT "{0}://localhost/api/v4/application/settings?private_token={1}&sudo=root&restricted_visibility_levels=public"'
+                conn.run('curl -k --request PUT "{0}://localhost/api/v4/application/settings?private_token={1}&sudo=root&restricted_visibility_levels=public"'
                     .format(proto, data['private_token']))
     except Exception as err:
         print("Failed to connect to GitLab via API..{}".format(str(err)))
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py b/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
index 9e4bbc2..7df937d 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
@@ -44,7 +44,7 @@ args = parser.parse_args()
 
 def set_hostname(subdomain, hosted_zone_name):
     try:
-        sudo('hostnamectl set-hostname {0}.{1}'.format(subdomain, hosted_zone_name))
+        conn.sudo('hostnamectl set-hostname {0}.{1}'.format(subdomain, hosted_zone_name))
     except Exception as err:
         traceback.print_exc()
         print('Failed to set hostname: ', str(err))
@@ -52,7 +52,7 @@ def set_hostname(subdomain, hosted_zone_name):
 
 def set_resolve():
     try:
-        sudo('ln -sf /run/systemd/resolve/resolv.conf /etc/resolv.conf')
+        conn.sudo('ln -sf /run/systemd/resolve/resolv.conf /etc/resolv.conf')
     except Exception as err:
         traceback.print_exc()
         print('Failed to set resolve: ', str(err))
@@ -61,10 +61,10 @@ def set_resolve():
 def cp_key(keyfile, host_string, os_user):
     try:
         key_name=keyfile.split("/")
-        sudo('mkdir -p /home/' + os_user + '/keys')
-        sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/keys')
+        conn.sudo('mkdir -p /home/' + os_user + '/keys')
+        conn.sudo('chown -R ' + os_user + ':' + os_user + ' /home/' + os_user + '/keys')
         local('scp -r -q -i {0} {0} {1}:/home/{3}/keys/{2}'.format(keyfile, host_string, key_name[-1], os_user))
-        sudo('chmod 600 /home/' + os_user + '/keys/*.pem')
+        conn.sudo('chmod 600 /home/' + os_user + '/keys/*.pem')
     except Exception as err:
         traceback.print_exc()
         print('Failed to copy key: ', str(err))
@@ -74,9 +74,9 @@ def cp_key(keyfile, host_string, os_user):
 def cp_backup_scripts(datalab_path):
     try:
         with cd(datalab_path + "tmp/"):
-            put('/root/scripts/backup.py', "backup.py")
-            put('/root/scripts/restore.py', "restore.py")
-            run('chmod +x backup.py restore.py')
+            conn.put('/root/scripts/backup.py', "backup.py")
+            conn.put('/root/scripts/restore.py', "restore.py")
+            conn.run('chmod +x backup.py restore.py')
     except Exception as err:
         traceback.print_exc()
         print('Failed to copy backup scripts: ', str(err))
@@ -86,18 +86,18 @@ def cp_backup_scripts(datalab_path):
 def cp_gitlab_scripts(datalab_path):
     try:
         if not exists('{}tmp/gitlab'.format(datalab_path)):
-            run('mkdir -p {}tmp/gitlab'.format(datalab_path))
+            conn.run('mkdir -p {}tmp/gitlab'.format(datalab_path))
         with cd('{}tmp/gitlab'.format(datalab_path)):
-            put('/root/scripts/gitlab_deploy.py', 'gitlab_deploy.py')
-            put('/root/scripts/configure_gitlab.py', 'configure_gitlab.py')
-            run('chmod +x gitlab_deploy.py configure_gitlab.py')
-            put('/root/templates/gitlab.rb', 'gitlab.rb')
-            put('/root/templates/gitlab.ini', 'gitlab.ini')
-            run('sed -i "s/CONF_OS_USER/{}/g" gitlab.ini'.format(os.environ['conf_os_user']))
-            run('sed -i "s/CONF_OS_FAMILY/{}/g" gitlab.ini'.format(os.environ['conf_os_family']))
-            run('sed -i "s/CONF_KEY_NAME/{}/g" gitlab.ini'.format(os.environ['conf_key_name']))
-            run('sed -i "s,CONF_DATALAB_PATH,{},g" gitlab.ini'.format(datalab_path))
-            run('sed -i "s/SERVICE_BASE_NAME/{}/g" gitlab.ini'.format(os.environ['conf_service_base_name']))
+            conn.put('/root/scripts/gitlab_deploy.py', 'gitlab_deploy.py')
+            conn.put('/root/scripts/configure_gitlab.py', 'configure_gitlab.py')
+            conn.run('chmod +x gitlab_deploy.py configure_gitlab.py')
+            conn.put('/root/templates/gitlab.rb', 'gitlab.rb')
+            conn.put('/root/templates/gitlab.ini', 'gitlab.ini')
+            conn.run('sed -i "s/CONF_OS_USER/{}/g" gitlab.ini'.format(os.environ['conf_os_user']))
+            conn.run('sed -i "s/CONF_OS_FAMILY/{}/g" gitlab.ini'.format(os.environ['conf_os_family']))
+            conn.run('sed -i "s/CONF_KEY_NAME/{}/g" gitlab.ini'.format(os.environ['conf_key_name']))
+            conn.run('sed -i "s,CONF_DATALAB_PATH,{},g" gitlab.ini'.format(datalab_path))
+            conn.run('sed -i "s/SERVICE_BASE_NAME/{}/g" gitlab.ini'.format(os.environ['conf_service_base_name']))
     except Exception as err:
         traceback.print_exc()
         print('Failed to copy gitlab scripts: ', str(err))
@@ -107,22 +107,22 @@ def cp_gitlab_scripts(datalab_path):
 def creating_service_directories(datalab_path, os_user):
     try:
         if not exists(datalab_path):
-            sudo('mkdir -p ' + datalab_path)
-            sudo('mkdir -p ' + datalab_path + 'conf')
-            sudo('mkdir -p ' + datalab_path + 'webapp/static')
-            sudo('mkdir -p ' + datalab_path + 'template')
-            sudo('mkdir -p ' + datalab_path + 'tmp')
-            sudo('mkdir -p ' + datalab_path + 'tmp/result')
-            sudo('mkdir -p ' + datalab_path + 'sources')
-            sudo('mkdir -p /var/opt/datalab/log/ssn')
-            sudo('mkdir -p /var/opt/datalab/log/edge')
-            sudo('mkdir -p /var/opt/datalab/log/notebook')
-            sudo('mkdir -p /var/opt/datalab/log/dataengine-service')
-            sudo('mkdir -p /var/opt/datalab/log/dataengine')
-            sudo('ln -s ' + datalab_path + 'conf /etc/opt/datalab')
-            sudo('ln -s /var/opt/datalab/log /var/log/datalab')
-            sudo('chown -R ' + os_user + ':' + os_user + ' /var/opt/datalab/log')
-            sudo('chown -R ' + os_user + ':' + os_user + ' ' + datalab_path)
+            conn.sudo('mkdir -p ' + datalab_path)
+            conn.sudo('mkdir -p ' + datalab_path + 'conf')
+            conn.sudo('mkdir -p ' + datalab_path + 'webapp/static')
+            conn.sudo('mkdir -p ' + datalab_path + 'template')
+            conn.sudo('mkdir -p ' + datalab_path + 'tmp')
+            conn.sudo('mkdir -p ' + datalab_path + 'tmp/result')
+            conn.sudo('mkdir -p ' + datalab_path + 'sources')
+            conn.sudo('mkdir -p /var/opt/datalab/log/ssn')
+            conn.sudo('mkdir -p /var/opt/datalab/log/edge')
+            conn.sudo('mkdir -p /var/opt/datalab/log/notebook')
+            conn.sudo('mkdir -p /var/opt/datalab/log/dataengine-service')
+            conn.sudo('mkdir -p /var/opt/datalab/log/dataengine')
+            conn.sudo('ln -s ' + datalab_path + 'conf /etc/opt/datalab')
+            conn.sudo('ln -s /var/opt/datalab/log /var/log/datalab')
+            conn.sudo('chown -R ' + os_user + ':' + os_user + ' /var/opt/datalab/log')
+            conn.sudo('chown -R ' + os_user + ':' + os_user + ' ' + datalab_path)
     except Exception as err:
         traceback.print_exc()
         print('Failed to create service directories: ', str(err))
@@ -132,64 +132,64 @@ def creating_service_directories(datalab_path, os_user):
 def configure_ssl_certs(hostname, custom_ssl_cert):
     try:
         if custom_ssl_cert:
-            put('/root/certs/datalab.crt', 'datalab.crt')
-            put('/root/certs/datalab.key', 'datalab.key')
-            sudo('mv datalab.crt /etc/ssl/certs/datalab.crt')
-            sudo('mv datalab.key /etc/ssl/certs/datalab.key')
+            conn.put('/root/certs/datalab.crt', 'datalab.crt')
+            conn.put('/root/certs/datalab.key', 'datalab.key')
+            conn.sudo('mv datalab.crt /etc/ssl/certs/datalab.crt')
+            conn.sudo('mv datalab.key /etc/ssl/certs/datalab.key')
         else:
             if os.environ['conf_stepcerts_enabled'] == 'true':
                 ensure_step(args.os_user)
-                sudo('mkdir -p /home/{0}/keys'.format(args.os_user))
-                sudo('''bash -c 'echo "{0}" | base64 --decode > /etc/ssl/certs/root_ca.crt' '''.format(
+                conn.sudo('mkdir -p /home/{0}/keys'.format(args.os_user))
+                conn.sudo('''bash -c 'echo "{0}" | base64 --decode > /etc/ssl/certs/root_ca.crt' '''.format(
                      os.environ['conf_stepcerts_root_ca']))
-                fingerprint = sudo('step certificate fingerprint /etc/ssl/certs/root_ca.crt')
-                sudo('step ca bootstrap --fingerprint {0} --ca-url "{1}"'.format(fingerprint,
+                fingerprint = conn.sudo('step certificate fingerprint /etc/ssl/certs/root_ca.crt')
+                conn.sudo('step ca bootstrap --fingerprint {0} --ca-url "{1}"'.format(fingerprint,
                                                                                  os.environ['conf_stepcerts_ca_url']))
-                sudo('echo "{0}" > /home/{1}/keys/provisioner_password'.format(
+                conn.sudo('echo "{0}" > /home/{1}/keys/provisioner_password'.format(
                      os.environ['conf_stepcerts_kid_password'], args.os_user))
                 sans = "--san localhost --san 127.0.0.1 {0}".format(args.step_cert_sans)
                 cn = hostname
-                sudo('step ca token {3} --kid {0} --ca-url "{1}" --root /etc/ssl/certs/root_ca.crt '
+                conn.sudo('step ca token {3} --kid {0} --ca-url "{1}" --root /etc/ssl/certs/root_ca.crt '
                      '--password-file /home/{2}/keys/provisioner_password {4} --output-file /tmp/step_token'.format(
                               os.environ['conf_stepcerts_kid'], os.environ['conf_stepcerts_ca_url'],
                               args.os_user, cn, sans))
-                token = sudo('cat /tmp/step_token')
-                sudo('step ca certificate "{0}" /etc/ssl/certs/datalab.crt /etc/ssl/certs/datalab.key '
+                token = conn.sudo('cat /tmp/step_token')
+                conn.sudo('step ca certificate "{0}" /etc/ssl/certs/datalab.crt /etc/ssl/certs/datalab.key '
                      '--token "{1}" --kty=RSA --size 2048 --provisioner {2} '.format(cn, token,
                                                                                      os.environ['conf_stepcerts_kid']))
-                sudo('touch /var/log/renew_certificates.log')
-                put('/root/templates/renew_certificates.sh', '/tmp/renew_certificates.sh')
-                sudo('mv /tmp/renew_certificates.sh /usr/local/bin/')
-                sudo('chmod +x /usr/local/bin/renew_certificates.sh')
-                sudo('sed -i "s/OS_USER/{0}/g" /usr/local/bin/renew_certificates.sh'.format(args.os_user))
-                sudo('sed -i "s|JAVA_HOME|{0}|g" /usr/local/bin/renew_certificates.sh'.format(find_java_path_remote()))
-                sudo('sed -i "s|RESOURCE_TYPE|ssn|g" /usr/local/bin/renew_certificates.sh')
-                sudo('sed -i "s|CONF_FILE|ssn|g" /usr/local/bin/renew_certificates.sh')
-                put('/root/templates/manage_step_certs.sh', '/usr/local/bin/manage_step_certs.sh', use_sudo=True)
-                sudo('chmod +x /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_ROOT_CERT_PATH|/etc/ssl/certs/root_ca.crt|g" '
+                conn.sudo('touch /var/log/renew_certificates.log')
+                conn.put('/root/templates/renew_certificates.sh', '/tmp/renew_certificates.sh')
+                conn.sudo('mv /tmp/renew_certificates.sh /usr/local/bin/')
+                conn.sudo('chmod +x /usr/local/bin/renew_certificates.sh')
+                conn.sudo('sed -i "s/OS_USER/{0}/g" /usr/local/bin/renew_certificates.sh'.format(args.os_user))
+                conn.sudo('sed -i "s|JAVA_HOME|{0}|g" /usr/local/bin/renew_certificates.sh'.format(find_java_path_remote()))
+                conn.sudo('sed -i "s|RESOURCE_TYPE|ssn|g" /usr/local/bin/renew_certificates.sh')
+                conn.sudo('sed -i "s|CONF_FILE|ssn|g" /usr/local/bin/renew_certificates.sh')
+                conn.put('/root/templates/manage_step_certs.sh', '/usr/local/bin/manage_step_certs.sh', use_sudo=True)
+                conn.sudo('chmod +x /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|STEP_ROOT_CERT_PATH|/etc/ssl/certs/root_ca.crt|g" '
                      '/usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_CERT_PATH|/etc/ssl/certs/datalab.crt|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_KEY_PATH|/etc/ssl/certs/datalab.key|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|STEP_CA_URL|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
+                conn.sudo('sed -i "s|STEP_CERT_PATH|/etc/ssl/certs/datalab.crt|g" /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|STEP_KEY_PATH|/etc/ssl/certs/datalab.key|g" /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|STEP_CA_URL|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
                     os.environ['conf_stepcerts_ca_url']))
-                sudo('sed -i "s|RESOURCE_TYPE|ssn|g" /usr/local/bin/manage_step_certs.sh')
-                sudo('sed -i "s|SANS|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(sans))
-                sudo('sed -i "s|CN|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(cn))
-                sudo('sed -i "s|KID|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
+                conn.sudo('sed -i "s|RESOURCE_TYPE|ssn|g" /usr/local/bin/manage_step_certs.sh')
+                conn.sudo('sed -i "s|SANS|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(sans))
+                conn.sudo('sed -i "s|CN|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(cn))
+                conn.sudo('sed -i "s|KID|{0}|g" /usr/local/bin/manage_step_certs.sh'.format(
                     os.environ['conf_stepcerts_kid']))
-                sudo('sed -i "s|STEP_PROVISIONER_PASSWORD_PATH|/home/{0}/keys/provisioner_password|g" '
+                conn.sudo('sed -i "s|STEP_PROVISIONER_PASSWORD_PATH|/home/{0}/keys/provisioner_password|g" '
                      '/usr/local/bin/manage_step_certs.sh'.format(args.os_user))
-                sudo('bash -c \'echo "0 * * * * root /usr/local/bin/manage_step_certs.sh >> '
+                conn.sudo('bash -c \'echo "0 * * * * root /usr/local/bin/manage_step_certs.sh >> '
                      '/var/log/renew_certificates.log 2>&1" >> /etc/crontab \'')
-                put('/root/templates/step-cert-manager.service', '/etc/systemd/system/step-cert-manager.service',
+                conn.put('/root/templates/step-cert-manager.service', '/etc/systemd/system/step-cert-manager.service',
                     use_sudo=True)
-                sudo('systemctl daemon-reload')
-                sudo('systemctl enable step-cert-manager.service')
+                conn.sudo('systemctl daemon-reload')
+                conn.sudo('systemctl enable step-cert-manager.service')
             else:
-                sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key \
+                conn.sudo('openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key \
                      -out /etc/ssl/certs/datalab.crt -subj "/C=US/ST=US/L=US/O=datalab/CN={}"'.format(hostname))
-        sudo('openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048')
+        conn.sudo('openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048')
     except Exception as err:
         traceback.print_exc()
         print('Failed to configure SSL certificates: ', str(err))
@@ -197,9 +197,9 @@ def configure_ssl_certs(hostname, custom_ssl_cert):
 
 def docker_build_script():
     try:
-        put('/root/scripts/docker_build.py', 'docker_build')
-        sudo('chmod +x docker_build')
-        sudo('mv docker_build /usr/bin/docker-build')
+        conn.put('/root/scripts/docker_build.py', 'docker_build')
+        conn.sudo('chmod +x docker_build')
+        conn.sudo('mv docker_build /usr/bin/docker-build')
     except Exception as err:
         traceback.print_exc()
         print('Failed to configure docker_build script: ', str(err))
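
Note: the local('scp ...') calls in this file are still Fabric 1's module-level local(), which Fabric 2 drops; the local side is usually handled with subprocess (or conn.local()). An illustrative rewrite of the key copy in cp_key(), under that assumption:

    import subprocess

    # Copy the keyfile into the remote keys directory from the local side.
    subprocess.run('scp -r -q -i {0} {0} {1}:/home/{3}/keys/{2}'.format(
        keyfile, host_string, key_name[-1], os_user), shell=True, check=True)
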
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
index 6693f5a..6ffaba7 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
@@ -90,14 +90,14 @@ keystore_passwd = id_generator()
 
 def copy_ssn_libraries():
     try:
-        sudo('mkdir -p /usr/lib/python3.8/datalab/')
-        run('mkdir -p /tmp/datalab_libs/')
+        conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
+        conn.run('mkdir -p /tmp/datalab_libs/')
         local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, host_string))
-        run('chmod a+x /tmp/datalab_libs/*')
-        sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
+        conn.run('chmod a+x /tmp/datalab_libs/*')
+        conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
         if exists('/usr/lib64'):
-            sudo('mkdir -p /usr/lib64/python3.8')
-            sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
+            conn.sudo('mkdir -p /usr/lib64/python3.8')
+            conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
     except Exception as err:
         traceback.print_exc()
         print('Failed to copy ssn libraries: ', str(err))
@@ -113,29 +113,29 @@ def configure_mongo(mongo_passwd, default_endpoint_name):
                 local('sed -i "s/MONGO_USR/mongod/g" /root/templates/mongod.service_template')
             local('scp -i {} /root/templates/mongod.service_template {}:/tmp/mongod.service'.format(args.keyfile,
                                                                                                     host_string))
-            sudo('mv /tmp/mongod.service /lib/systemd/system/mongod.service')
-            sudo('systemctl daemon-reload')
-            sudo('systemctl enable mongod.service')
+            conn.sudo('mv /tmp/mongod.service /lib/systemd/system/mongod.service')
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo('systemctl enable mongod.service')
         local('sed -i "s|PASSWORD|{}|g" /root/scripts/resource_status.py'.format(mongo_passwd))
         local('scp -i {} /root/scripts/resource_status.py {}:/tmp/resource_status.py'.format(args.keyfile,
                                                                                              host_string))
-        sudo('mv /tmp/resource_status.py ' + os.environ['ssn_datalab_path'] + 'tmp/')
+        conn.sudo('mv /tmp/resource_status.py ' + os.environ['ssn_datalab_path'] + 'tmp/')
         local('sed -i "s|PASSWORD|{}|g" /root/scripts/configure_mongo.py'.format(mongo_passwd))
         local('scp -i {} /root/scripts/configure_mongo.py {}:/tmp/configure_mongo.py'.format(args.keyfile,
                                                                                              host_string))
-        sudo('mv /tmp/configure_mongo.py ' + args.datalab_path + 'tmp/')
+        conn.sudo('mv /tmp/configure_mongo.py ' + args.datalab_path + 'tmp/')
         local('scp -i {} /root/files/{}/mongo_roles.json {}:/tmp/mongo_roles.json'.format(args.keyfile,
                                                                                           args.cloud_provider,
                                                                                           host_string))
         local('scp -i {} /root/files/local_endpoint.json {}:/tmp/local_endpoint.json'.format(args.keyfile,
                                                                                              host_string))
-        sudo('mv /tmp/mongo_roles.json ' + args.datalab_path + 'tmp/')
-        sudo('sed -i "s|DEF_ENDPOINT_NAME|{0}|g" /tmp/local_endpoint.json'.format(default_endpoint_name))
-        sudo('sed -i "s|CLOUD_PROVIDER|{0}|g" /tmp/local_endpoint.json'.format(
+        conn.sudo('mv /tmp/mongo_roles.json ' + args.datalab_path + 'tmp/')
+        conn.sudo('sed -i "s|DEF_ENDPOINT_NAME|{0}|g" /tmp/local_endpoint.json'.format(default_endpoint_name))
+        conn.sudo('sed -i "s|CLOUD_PROVIDER|{0}|g" /tmp/local_endpoint.json'.format(
             os.environ['conf_cloud_provider'].upper()))
-        sudo('mv /tmp/local_endpoint.json ' + args.datalab_path + 'tmp/')
-        sudo('pip3 install -U six==1.15.0')
-        sudo("python3 " + args.datalab_path + "tmp/configure_mongo.py --datalab_path {} ".format(
+        conn.sudo('mv /tmp/local_endpoint.json ' + args.datalab_path + 'tmp/')
+        conn.sudo('pip3 install -U six==1.15.0')
+        conn.sudo("python3 " + args.datalab_path + "tmp/configure_mongo.py --datalab_path {} ".format(
             args.datalab_path))
     except Exception as err:
         traceback.print_exc()
@@ -147,48 +147,48 @@ def build_ui():
     try:
         # Building Front-end
         with cd(args.datalab_path + '/sources/services/self-service/src/main/resources/webapp/'):
-            sudo('sed -i "s|CLOUD_PROVIDER|{}|g" src/dictionary/global.dictionary.ts'.format(args.cloud_provider))
+            conn.sudo('sed -i "s|CLOUD_PROVIDER|{}|g" src/dictionary/global.dictionary.ts'.format(args.cloud_provider))
 
             if args.cloud_provider == 'azure' and os.environ['azure_datalake_enable'] == 'true':
-                sudo('sed -i "s|\'use_ldap\': true|{}|g" src/dictionary/azure.dictionary.ts'.format(
+                conn.sudo('sed -i "s|\'use_ldap\': true|{}|g" src/dictionary/azure.dictionary.ts'.format(
                     '\'use_ldap\': false'))
 
-            sudo('echo "N" | npm install')
+            conn.sudo('echo "N" | npm install')
             manage_npm_pkg('run build.prod')
-            sudo('sudo chown -R {} {}/*'.format(args.os_user, args.datalab_path))
+            conn.sudo('sudo chown -R {} {}/*'.format(args.os_user, args.datalab_path))
 
         # Building Back-end
         with cd(args.datalab_path + '/sources/'):
-            sudo('/opt/maven/bin/mvn -P{} -DskipTests package'.format(args.cloud_provider))
+            conn.sudo('/opt/maven/bin/mvn -P{} -DskipTests package'.format(args.cloud_provider))
 
-        sudo('mkdir -p {}/webapp/'.format(args.datalab_path))
+        conn.sudo('mkdir -p {}/webapp/'.format(args.datalab_path))
         for service in ['self-service', 'provisioning-service', 'billing']:
-            sudo('mkdir -p {}/webapp/{}/lib/'.format(args.datalab_path, service))
-            sudo('mkdir -p {}/webapp/{}/conf/'.format(args.datalab_path, service))
-        sudo('cp {0}/sources/services/self-service/self-service.yml {0}/webapp/self-service/conf/'.format(
+            conn.sudo('mkdir -p {}/webapp/{}/lib/'.format(args.datalab_path, service))
+            conn.sudo('mkdir -p {}/webapp/{}/conf/'.format(args.datalab_path, service))
+        conn.sudo('cp {0}/sources/services/self-service/self-service.yml {0}/webapp/self-service/conf/'.format(
             args.datalab_path))
-        sudo('cp {0}/sources/services/self-service/target/self-service-*.jar {0}/webapp/self-service/lib/'.format(
+        conn.sudo('cp {0}/sources/services/self-service/target/self-service-*.jar {0}/webapp/self-service/lib/'.format(
             args.datalab_path))
-        sudo(
+        conn.sudo(
             'cp {0}/sources/services/provisioning-service/provisioning.yml {0}/webapp/provisioning-service/conf/'.format(
                 args.datalab_path))
-        sudo('cp {0}/sources/services/provisioning-service/target/provisioning-service-*.jar '
+        conn.sudo('cp {0}/sources/services/provisioning-service/target/provisioning-service-*.jar '
              '{0}/webapp/provisioning-service/lib/'.format(args.datalab_path))
 
         if args.cloud_provider == 'azure':
-            sudo('cp {0}/sources/services/billing-azure/billing.yml {0}/webapp/billing/conf/'.format(args.datalab_path))
-            sudo('cp {0}/sources/services/billing-azure/target/billing-azure*.jar {0}/webapp/billing/lib/'.format(
+            conn.sudo('cp {0}/sources/services/billing-azure/billing.yml {0}/webapp/billing/conf/'.format(args.datalab_path))
+            conn.sudo('cp {0}/sources/services/billing-azure/target/billing-azure*.jar {0}/webapp/billing/lib/'.format(
                 args.datalab_path))
         elif args.cloud_provider == 'aws':
-            sudo('cp {0}/sources/services/billing-aws/billing.yml {0}/webapp/billing/conf/'.format(args.datalab_path))
-            sudo('cp {0}/sources/services/billing-aws/src/main/resources/application.yml '
+            conn.sudo('cp {0}/sources/services/billing-aws/billing.yml {0}/webapp/billing/conf/'.format(args.datalab_path))
+            conn.sudo('cp {0}/sources/services/billing-aws/src/main/resources/application.yml '
                  '{0}/webapp/billing/conf/billing_app.yml'.format(args.datalab_path))
-            sudo(
+            conn.sudo(
                 'cp {0}/sources/services/billing-aws/target/billing-aws*.jar {0}/webapp/billing/lib/'.format(
                     args.datalab_path))
         elif args.cloud_provider == 'gcp':
-            sudo('cp {0}/sources/services/billing-gcp/billing.yml {0}/webapp/billing/conf/'.format(args.datalab_path))
-            sudo(
+            conn.sudo('cp {0}/sources/services/billing-gcp/billing.yml {0}/webapp/billing/conf/'.format(args.datalab_path))
+            conn.sudo(
                 'cp {0}/sources/services/billing-gcp/target/billing-gcp*.jar {0}/webapp/billing/lib/'.format(
                     args.datalab_path))
     except Exception as err:
@@ -221,8 +221,8 @@ if __name__ == "__main__":
     print("Configuring MongoDB")
     configure_mongo(mongo_passwd, args.default_endpoint_name)
 
-    sudo('echo DATALAB_CONF_DIR={} >> /etc/profile'.format(datalab_conf_dir))
-    sudo('echo export DATALAB_CONF_DIR >> /etc/profile')
+    conn.sudo('echo DATALAB_CONF_DIR={} >> /etc/profile'.format(datalab_conf_dir))
+    conn.sudo('echo export DATALAB_CONF_DIR >> /etc/profile')
 
     print("Installing build dependencies for UI")
     install_build_dep()
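
Note: the with cd(...) blocks in build_ui() use Fabric 1's global cd; if conn is a Fabric 2 Connection, the working directory is scoped on the connection instead. A sketch of the back-end build step under that assumption (how cd interacts with sudo should be verified against the Fabric version actually in use):

    with conn.cd(args.datalab_path + '/sources/'):
        conn.sudo('/opt/maven/bin/mvn -P{} -DskipTests package'.format(args.cloud_provider))
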
diff --git a/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py b/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
index cc884a5..93cfbd4 100644
--- a/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
+++ b/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
@@ -40,11 +40,11 @@ def upload_response_file(instance_name, local_log_filepath, os_user):
     pkey = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
     datalab.fab.init_datalab_connection(args.instance_hostname, os_user, pkey)
     try:
-        put('/root/result.json', '/home/{}/{}.json'.format(os_user, os.environ['request_id']))
-        sudo('mv /home/{}/{}.json {}tmp/result/'.format(os_user, os.environ['request_id'],
+        conn.put('/root/result.json', '/home/{}/{}.json'.format(os_user, os.environ['request_id']))
+        conn.sudo('mv /home/{}/{}.json {}tmp/result/'.format(os_user, os.environ['request_id'],
                                                         os.environ['ssn_datalab_path']))
-        put(local_log_filepath, '/home/{}/ssn.log'.format(os_user))
-        sudo('mv /home/{}/ssn.log /var/opt/datalab/log/ssn/'.format(os_user))
+        conn.put(local_log_filepath, '/home/{}/ssn.log'.format(os_user))
+        conn.sudo('mv /home/{}/ssn.log /var/opt/datalab/log/ssn/'.format(os_user))
         datalab.fab.close_connection()
         return True
     except:
diff --git a/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py b/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
index c30fcd9..b948970 100644
--- a/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
+++ b/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
@@ -57,7 +57,7 @@ if __name__ == "__main__":
     print("Prepare .ensure directory")
     try:
         if not exists('/home/' + args.os_user + '/.ensure_dir'):
-            sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
+            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
     #print("Mount additional volume")
diff --git a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
index 9185753..3cb3864 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
@@ -80,7 +80,7 @@ if __name__ == "__main__":
     print("Prepare .ensure directory")
     try:
         if not exists('/home/' + args.os_user + '/.ensure_dir'):
-            sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
+            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
     print("Mount additional volume")
diff --git a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
index 2881caa..fbd809a 100644
--- a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
+++ b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
@@ -79,7 +79,7 @@ if __name__ == "__main__":
     print("Prepare .ensure directory")
     try:
         if not exists('/home/' + args.os_user + '/.ensure_dir'):
-            sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
+            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
     print("Mount additional volume")
diff --git a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
index 9a24290..b288f7b 100644
--- a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
+++ b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
@@ -79,49 +79,49 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
 def configure_zeppelin(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/zeppelin_ensured'):
         try:
-            sudo('wget ' + zeppelin_link + ' -O /tmp/zeppelin-' + zeppelin_version + '-bin-netinst.tgz')
-            sudo('tar -zxvf /tmp/zeppelin-' + zeppelin_version + '-bin-netinst.tgz -C /opt/')
-            sudo('ln -s /opt/zeppelin-' + zeppelin_version + '-bin-netinst /opt/zeppelin')
-            sudo('cp /opt/zeppelin/conf/zeppelin-env.sh.template /opt/zeppelin/conf/zeppelin-env.sh')
-            java_home = run("update-alternatives --query java | grep -o \'/.*/java-8.*/jre\'").splitlines()[0]
-            sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/zeppelin/conf/zeppelin-env.sh".format(java_home))
-            sudo('cp /opt/zeppelin/conf/zeppelin-site.xml.template /opt/zeppelin/conf/zeppelin-site.xml')
-            sudo('sed -i \"/# export ZEPPELIN_PID_DIR/c\export ZEPPELIN_PID_DIR=/var/run/zeppelin\" /opt/zeppelin/conf/zeppelin-env.sh')
-            sudo('sed -i \"/# export ZEPPELIN_IDENT_STRING/c\export ZEPPELIN_IDENT_STRING=notebook\" /opt/zeppelin/conf/zeppelin-env.sh')
-            sudo('sed -i \"/# export ZEPPELIN_INTERPRETER_DEP_MVNREPO/c\export ZEPPELIN_INTERPRETER_DEP_MVNREPO=https://repo1.maven.org/maven2\" /opt/zeppelin/conf/zeppelin-env.sh')
-            sudo('sed -i \"/# export SPARK_HOME/c\export SPARK_HOME=\/opt\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh')
-            sudo('sed -i \'s/127.0.0.1/0.0.0.0/g\' /opt/zeppelin/conf/zeppelin-site.xml')
-            sudo('mkdir /var/log/zeppelin')
-            sudo('mkdir /var/run/zeppelin')
-            sudo('ln -s /var/log/zeppelin /opt/zeppelin-' + zeppelin_version + '-bin-netinst/logs')
-            sudo('chown ' + os_user + ':' + os_user + ' -R /var/log/zeppelin')
-            sudo('ln -s /var/run/zeppelin /opt/zeppelin-' + zeppelin_version + '-bin-netinst/run')
-            sudo('chown ' + os_user + ':' + os_user + ' -R /var/run/zeppelin')
-            sudo('/opt/zeppelin/bin/install-interpreter.sh --name ' + zeppelin_interpreters + ' --proxy-url $http_proxy')
-            sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin-' + zeppelin_version + '-bin-netinst')
-            sudo('mkdir -p /opt/zeppelin/lib/interpreter/')
-            sudo('cp /opt/zeppelin-' + zeppelin_version + '-bin-netinst/interpreter/md/zeppelin-markdown-*.jar /opt/zeppelin/lib/interpreter/') # necessary when executing paragraph launches java process with "-cp :/opt/zeppelin/lib/interpreter/*:"
-            sudo('cp /opt/zeppelin-' + zeppelin_version + '-bin-netinst/interpreter/shell/zeppelin-shell-*.jar /opt/zeppelin/lib/interpreter/')
+            conn.sudo('wget ' + zeppelin_link + ' -O /tmp/zeppelin-' + zeppelin_version + '-bin-netinst.tgz')
+            conn.sudo('tar -zxvf /tmp/zeppelin-' + zeppelin_version + '-bin-netinst.tgz -C /opt/')
+            conn.sudo('ln -s /opt/zeppelin-' + zeppelin_version + '-bin-netinst /opt/zeppelin')
+            conn.sudo('cp /opt/zeppelin/conf/zeppelin-env.sh.template /opt/zeppelin/conf/zeppelin-env.sh')
+            java_home = conn.run("update-alternatives --query java | grep -o \'/.*/java-8.*/jre\'").stdout.splitlines()[0]
+            conn.sudo("echo 'export JAVA_HOME=\'{}\'' >> /opt/zeppelin/conf/zeppelin-env.sh".format(java_home))
+            conn.sudo('cp /opt/zeppelin/conf/zeppelin-site.xml.template /opt/zeppelin/conf/zeppelin-site.xml')
+            conn.sudo('sed -i \"/# export ZEPPELIN_PID_DIR/c\export ZEPPELIN_PID_DIR=/var/run/zeppelin\" /opt/zeppelin/conf/zeppelin-env.sh')
+            conn.sudo('sed -i \"/# export ZEPPELIN_IDENT_STRING/c\export ZEPPELIN_IDENT_STRING=notebook\" /opt/zeppelin/conf/zeppelin-env.sh')
+            conn.sudo('sed -i \"/# export ZEPPELIN_INTERPRETER_DEP_MVNREPO/c\export ZEPPELIN_INTERPRETER_DEP_MVNREPO=https://repo1.maven.org/maven2\" /opt/zeppelin/conf/zeppelin-env.sh')
+            conn.sudo('sed -i \"/# export SPARK_HOME/c\export SPARK_HOME=\/opt\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh')
+            conn.sudo('sed -i \'s/127.0.0.1/0.0.0.0/g\' /opt/zeppelin/conf/zeppelin-site.xml')
+            conn.sudo('mkdir /var/log/zeppelin')
+            conn.sudo('mkdir /var/run/zeppelin')
+            conn.sudo('ln -s /var/log/zeppelin /opt/zeppelin-' + zeppelin_version + '-bin-netinst/logs')
+            conn.sudo('chown ' + os_user + ':' + os_user + ' -R /var/log/zeppelin')
+            conn.sudo('ln -s /var/run/zeppelin /opt/zeppelin-' + zeppelin_version + '-bin-netinst/run')
+            conn.sudo('chown ' + os_user + ':' + os_user + ' -R /var/run/zeppelin')
+            conn.sudo('/opt/zeppelin/bin/install-interpreter.sh --name ' + zeppelin_interpreters + ' --proxy-url $http_proxy')
+            conn.sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin-' + zeppelin_version + '-bin-netinst')
+            conn.sudo('mkdir -p /opt/zeppelin/lib/interpreter/')
+            conn.sudo('cp /opt/zeppelin-' + zeppelin_version + '-bin-netinst/interpreter/md/zeppelin-markdown-*.jar /opt/zeppelin/lib/interpreter/') # necessary because executing a paragraph launches a java process with "-cp :/opt/zeppelin/lib/interpreter/*:"
+            conn.sudo('cp /opt/zeppelin-' + zeppelin_version + '-bin-netinst/interpreter/shell/zeppelin-shell-*.jar /opt/zeppelin/lib/interpreter/')
         except:
             sys.exit(1)
         try:
-            put(templates_dir + 'zeppelin-notebook.service', '/tmp/zeppelin-notebook.service')
-            sudo("sed -i 's|OS_USR|" + os_user + "|' /tmp/zeppelin-notebook.service")
-            http_proxy = run('echo $http_proxy')
-            https_proxy = run('echo $https_proxy')
-            sudo('sed -i \'/\[Service\]/ a\Environment=\"HTTP_PROXY={}\"\'  /tmp/zeppelin-notebook.service'.format(
+            conn.put(templates_dir + 'zeppelin-notebook.service', '/tmp/zeppelin-notebook.service')
+            conn.sudo("sed -i 's|OS_USR|" + os_user + "|' /tmp/zeppelin-notebook.service")
+            http_proxy = conn.run('echo $http_proxy').stdout.strip()
+            https_proxy = conn.run('echo $https_proxy').stdout.strip()
+            conn.sudo('sed -i \'/\[Service\]/ a\Environment=\"HTTP_PROXY={}\"\'  /tmp/zeppelin-notebook.service'.format(
                 http_proxy))
-            sudo('sed -i \'/\[Service\]/ a\Environment=\"HTTPS_PROXY={}\"\'  /tmp/zeppelin-notebook.service'.format(
+            conn.sudo('sed -i \'/\[Service\]/ a\Environment=\"HTTPS_PROXY={}\"\'  /tmp/zeppelin-notebook.service'.format(
                 https_proxy))
-            sudo("chmod 644 /tmp/zeppelin-notebook.service")
-            sudo('cp /tmp/zeppelin-notebook.service /etc/systemd/system/zeppelin-notebook.service')
-            sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/')
-            sudo('mkdir -p /mnt/var')
-            sudo('chown ' + os_user + ':' + os_user + ' /mnt/var')
-            sudo("systemctl daemon-reload")
-            sudo("systemctl enable zeppelin-notebook")
-            sudo('echo \"d /var/run/zeppelin 0755 ' + os_user + '\" > /usr/lib/tmpfiles.d/zeppelin.conf')
-            sudo('touch /home/' + os_user + '/.ensure_dir/zeppelin_ensured')
+            conn.sudo("chmod 644 /tmp/zeppelin-notebook.service")
+            conn.sudo('cp /tmp/zeppelin-notebook.service /etc/systemd/system/zeppelin-notebook.service')
+            conn.sudo('chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/')
+            conn.sudo('mkdir -p /mnt/var')
+            conn.sudo('chown ' + os_user + ':' + os_user + ' /mnt/var')
+            conn.sudo("systemctl daemon-reload")
+            conn.sudo("systemctl enable zeppelin-notebook")
+            conn.sudo('echo \"d /var/run/zeppelin 0755 ' + os_user + '\" > /usr/lib/tmpfiles.d/zeppelin.conf')
+            conn.sudo('touch /home/' + os_user + '/.ensure_dir/zeppelin_ensured')
         except:
             sys.exit(1)
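
One behavioural difference worth noting for hunks like the one above: Fabric 1's run()/sudo() returned string-like values, whereas Fabric 2's conn.run()/conn.sudo() return invoke Result objects, so captured output generally needs .stdout before string operations (as in the java_home and proxy lines). A small sketch of the assumed usage:

    # Fabric 2 run()/sudo() return Result objects rather than strings.
    result = conn.run('echo $http_proxy', hide=True)
    http_proxy = result.stdout.strip()   # captured output as a plain string
    exit_code = result.exited            # integer exit status
    succeeded = result.ok                # True when the command exited 0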
 
@@ -131,67 +131,67 @@ def configure_local_livy_kernels(args):
         port_number_found = False
         default_port = 8998
         livy_port = ''
-        put(templates_dir + 'interpreter_livy.json', '/tmp/interpreter.json')
-        sudo('sed -i "s|ENDPOINTURL|' + args.endpoint_url + '|g" /tmp/interpreter.json')
-        sudo('sed -i "s|OS_USER|' + args.os_user + '|g" /tmp/interpreter.json')
+        conn.put(templates_dir + 'interpreter_livy.json', '/tmp/interpreter.json')
+        conn.sudo('sed -i "s|ENDPOINTURL|' + args.endpoint_url + '|g" /tmp/interpreter.json')
+        conn.sudo('sed -i "s|OS_USER|' + args.os_user + '|g" /tmp/interpreter.json')
         spark_memory = get_spark_memory()
-        sudo('sed -i "s|DRIVER_MEMORY|{}m|g" /tmp/interpreter.json'.format(spark_memory))
+        conn.sudo('sed -i "s|DRIVER_MEMORY|{}m|g" /tmp/interpreter.json'.format(spark_memory))
         while not port_number_found:
-            port_free = sudo('nmap -p ' + str(default_port) + ' localhost | grep "closed" > /dev/null; echo $?')
+            port_free = conn.sudo('nmap -p ' + str(default_port) + ' localhost | grep "closed" > /dev/null; echo $?').stdout
             port_free = port_free[:1]
             if port_free == '0':
                 livy_port = default_port
                 port_number_found = True
             else:
                 default_port += 1
-        sudo('sed -i "s|LIVY_PORT|' + str(livy_port) + '|g" /tmp/interpreter.json')
+        conn.sudo('sed -i "s|LIVY_PORT|' + str(livy_port) + '|g" /tmp/interpreter.json')
         update_zeppelin_interpreters(args.multiple_clusters, r_enabled, 'local')
-        sudo('cp -f /tmp/interpreter.json /opt/zeppelin/conf/interpreter.json')
-        sudo('echo "livy.server.port = ' + str(livy_port) + '" >> /opt/livy/conf/livy.conf')
-        sudo('''echo "SPARK_HOME='/opt/spark/'" >> /opt/livy/conf/livy-env.sh''')
+        conn.sudo('cp -f /tmp/interpreter.json /opt/zeppelin/conf/interpreter.json')
+        conn.sudo('echo "livy.server.port = ' + str(livy_port) + '" >> /opt/livy/conf/livy.conf')
+        conn.sudo('''echo "SPARK_HOME='/opt/spark/'" >> /opt/livy/conf/livy-env.sh''')
         if exists('/opt/livy/conf/spark-blacklist.conf'):
-            sudo('sed -i "s/^/#/g" /opt/livy/conf/spark-blacklist.conf')
-        sudo("systemctl start livy-server")
-        sudo('chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/')
-        sudo('touch /home/' + args.os_user + '/.ensure_dir/local_livy_kernel_ensured')
-    sudo("systemctl daemon-reload")
-    sudo("systemctl start zeppelin-notebook")
+            conn.sudo('sed -i "s/^/#/g" /opt/livy/conf/spark-blacklist.conf')
+        conn.sudo("systemctl start livy-server")
+        conn.sudo('chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/')
+        conn.sudo('touch /home/' + args.os_user + '/.ensure_dir/local_livy_kernel_ensured')
+    conn.sudo("systemctl daemon-reload")
+    conn.sudo("systemctl start zeppelin-notebook")
 
 
 def configure_local_spark_kernels(args):
     if not exists('/home/' + args.os_user + '/.ensure_dir/local_spark_kernel_ensured'):
-        put(templates_dir + 'interpreter_spark.json', '/tmp/interpreter.json')
-        sudo('sed -i "s|ENDPOINTURL|' + args.endpoint_url + '|g" /tmp/interpreter.json')
-        sudo('sed -i "s|OS_USER|' + args.os_user + '|g" /tmp/interpreter.json')
+        conn.put(templates_dir + 'interpreter_spark.json', '/tmp/interpreter.json')
+        conn.sudo('sed -i "s|ENDPOINTURL|' + args.endpoint_url + '|g" /tmp/interpreter.json')
+        conn.sudo('sed -i "s|OS_USER|' + args.os_user + '|g" /tmp/interpreter.json')
         spark_memory = get_spark_memory()
-        sudo('sed -i "s|DRIVER_MEMORY|{}m|g" /tmp/interpreter.json'.format(spark_memory))
+        conn.sudo('sed -i "s|DRIVER_MEMORY|{}m|g" /tmp/interpreter.json'.format(spark_memory))
         update_zeppelin_interpreters(args.multiple_clusters, r_enabled, 'local')
-        sudo('cp -f /tmp/interpreter.json /opt/zeppelin/conf/interpreter.json')
-        sudo('chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/')
-        sudo('touch /home/' + args.os_user + '/.ensure_dir/local_spark_kernel_ensured')
-    sudo("systemctl daemon-reload")
-    sudo("systemctl start zeppelin-notebook")
+        conn.sudo('cp -f /tmp/interpreter.json /opt/zeppelin/conf/interpreter.json')
+        conn.sudo('chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/')
+        conn.sudo('touch /home/' + args.os_user + '/.ensure_dir/local_spark_kernel_ensured')
+    conn.sudo("systemctl daemon-reload")
+    conn.sudo("systemctl start zeppelin-notebook")
 
 
 def install_local_livy(args):
     if not exists('/home/' + args.os_user + '/.ensure_dir/local_livy_ensured'):
-        sudo('wget http://archive.cloudera.com/beta/livy/livy-server-' + args.livy_version + '.zip -O /opt/livy-server-'
+        conn.sudo('wget http://archive.cloudera.com/beta/livy/livy-server-' + args.livy_version + '.zip -O /opt/livy-server-'
              + args.livy_version + '.zip')
-        sudo('unzip /opt/livy-server-' + args.livy_version + '.zip -d /opt/')
-        sudo('mv /opt/livy-server-' + args.livy_version + '/ /opt/livy/')
-        sudo('mkdir -p /var/run/livy')
-        sudo('mkdir -p /opt/livy/logs')
-        sudo('chown ' + args.os_user + ':' + args.os_user + ' -R /var/run/livy')
-        sudo('chown ' + args.os_user + ':' + args.os_user + ' -R /opt/livy/')
-        put(templates_dir + 'livy-server-cluster.service', '/tmp/livy-server-cluster.service')
-        sudo('mv /tmp/livy-server-cluster.service /opt/')
-        put(templates_dir + 'livy-server.service', '/tmp/livy-server.service')
-        sudo("sed -i 's|OS_USER|" + args.os_user + "|' /tmp/livy-server.service")
-        sudo("chmod 644 /tmp/livy-server.service")
-        sudo('cp /tmp/livy-server.service /etc/systemd/system/livy-server.service')
-        sudo("systemctl daemon-reload")
-        sudo("systemctl enable livy-server")
-        sudo('touch /home/' + args.os_user + '/.ensure_dir/local_livy_ensured')
+        conn.sudo('unzip /opt/livy-server-' + args.livy_version + '.zip -d /opt/')
+        conn.sudo('mv /opt/livy-server-' + args.livy_version + '/ /opt/livy/')
+        conn.sudo('mkdir -p /var/run/livy')
+        conn.sudo('mkdir -p /opt/livy/logs')
+        conn.sudo('chown ' + args.os_user + ':' + args.os_user + ' -R /var/run/livy')
+        conn.sudo('chown ' + args.os_user + ':' + args.os_user + ' -R /opt/livy/')
+        conn.put(templates_dir + 'livy-server-cluster.service', '/tmp/livy-server-cluster.service')
+        conn.sudo('mv /tmp/livy-server-cluster.service /opt/')
+        conn.put(templates_dir + 'livy-server.service', '/tmp/livy-server.service')
+        conn.sudo("sed -i 's|OS_USER|" + args.os_user + "|' /tmp/livy-server.service")
+        conn.sudo("chmod 644 /tmp/livy-server.service")
+        conn.sudo('cp /tmp/livy-server.service /etc/systemd/system/livy-server.service')
+        conn.sudo("systemctl daemon-reload")
+        conn.sudo("systemctl enable livy-server")
+        conn.sudo('touch /home/' + args.os_user + '/.ensure_dir/local_livy_ensured')
 
 
 ##############
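
The Livy configuration above probes ports starting at 8998 with nmap until it finds one that is not in use. The same idea, written as a self-contained helper with explicit Result handling (helper name is illustrative, not part of the codebase):

    def find_free_port(conn, start=8998):
        """Probe remote ports with nmap until one is reported closed (i.e. free)."""
        port = start
        while True:
            # grep succeeds (exit 0) when nmap reports the port as closed
            out = conn.sudo('nmap -p {} localhost | grep "closed" > /dev/null; echo $?'.format(port),
                            hide=True).stdout.strip()
            if out == '0':
                return port
            port += 1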
@@ -206,7 +206,7 @@ if __name__ == "__main__":
     print("Prepare .ensure directory")
     try:
         if not exists('/home/' + args.os_user + '/.ensure_dir'):
-            sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
+            conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
     print("Mount additional volume")
@@ -255,7 +255,7 @@ if __name__ == "__main__":
     if exists('/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
     # COPY PRE-COMMIT SCRIPT TO ZEPPELIN
-    sudo('cp /home/{}/.git/templates/hooks/pre-commit /opt/zeppelin/notebook/.git/hooks/'.format(args.os_user))
+    conn.sudo('cp /home/{}/.git/templates/hooks/pre-commit /opt/zeppelin/notebook/.git/hooks/'.format(args.os_user))
 
     # INSTALL INACTIVITY CHECKER
     print("Install inactivity checker")
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/dao/RequestIdDAO.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/dao/RequestIdDAO.java
index aa83887..c75f69a 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/dao/RequestIdDAO.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/dao/RequestIdDAO.java
@@ -46,7 +46,7 @@ public class RequestIdDAO extends BaseDAO {
         return opt.get();
     }
 
-    public void put(RequestIdDTO requestId) {
+    public void put(RequestIdDTO requestId) {
         getCollection(REQUEST_ID)
                 .insertOne(convertToBson(requestId));
     }
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/RequestId.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/RequestId.java
index f81f1e0..9fc4205 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/RequestId.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/RequestId.java
@@ -50,7 +50,7 @@ public class RequestId {
      * @param username the name of user.
      * @param uuid     UUID.
      */
-    public String put(String username, String uuid) {
+    public String put(String username, String uuid) {
         LOGGER.trace("Register request id {} for user {}", uuid, username);
         dao.put(new RequestIdDTO()
                 .withId(uuid)
@@ -67,7 +67,7 @@ public class RequestId {
      * @return new UUID
      */
     public String get(String username) {
-        return put(UUID.randomUUID().toString(), username);
+        return put(UUID.randomUUID().toString(), username);
     }
 
     /**


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org