Posted to commits@datalab.apache.org by lf...@apache.org on 2021/02/04 13:46:14 UTC

[incubator-datalab] branch DATALAB-2091-fab2 updated: [DATALAB-2091]: partially replaced local with subprocess.run(

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2091-fab2
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git


The following commit(s) were added to refs/heads/DATALAB-2091-fab2 by this push:
     new 34cf9c2  [DATALAB-2091]: partially replaced local with subprocess.run(
34cf9c2 is described below

commit 34cf9c232419cbb6fa64df918af1e38eae90222a
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Thu Feb 4 15:45:55 2021 +0200

    [DATALAB-2091]: partially replaced local with subprocess.run(
---
 .../scripts/deploy_datalab.py                      |  13 +-
 .../scripts/update_amazon_repositories.py          |   4 +-
 .../scripts/post-deployment_configuration.py       | 182 +++++++++++----------
 .../src/base/scripts/install_user_key.py           |   2 +-
 .../src/general/api/check_inactivity.py            |   2 +-
 .../src/general/api/configure.py                   |   3 +-
 .../src/general/api/create.py                      |   3 +-
 .../src/general/api/create_image.py                |   2 +-
 .../src/general/api/git_creds.py                   |   2 +-
 .../src/general/api/install_libs.py                |   3 +-
 .../src/general/api/list_libs.py                   |   3 +-
 .../src/general/api/reconfigure_spark.py           |   3 +-
 .../src/general/api/recreate.py                    |   2 +-
 .../src/general/api/reupload_key.py                |   2 +-
 .../src/general/api/start.py                       |   2 +-
 .../src/general/api/status.py                      |   2 +-
 .../src/general/api/stop.py                        |   2 +-
 .../src/general/api/terminate.py                   |   2 +-
 .../src/general/api/terminate_image.py             |   2 +-
 .../src/general/lib/aws/actions_lib.py             |  28 ++--
 .../src/general/lib/azure/actions_lib.py           |   8 +-
 .../src/general/lib/gcp/actions_lib.py             |  18 +-
 .../src/general/lib/os/debian/common_lib.py        |   7 +-
 .../src/general/lib/os/redhat/common_lib.py        |   6 +-
 .../src/general/lib/os/redhat/notebook_lib.py      |  12 +-
 .../jupyter_dataengine-service_create_configs.py   |   2 +-
 .../jupyter_dataengine-service_create_configs.py   |   2 +-
 .../os/deeplearning_dataengine_create_configs.py   |   4 +-
 .../general/scripts/os/get_list_available_pkgs.py  |   3 +-
 .../os/jupyter_dataengine_create_configs.py        |   8 +-
 .../scripts/os/tensor_dataengine_create_configs.py |   4 +-
 .../os/zeppelin_dataengine_create_configs.py       |  97 +++++------
 .../src/ssn/scripts/configure_ui.py                |  31 ++--
 .../src/ssn/scripts/docker_build.py                |   6 +-
 .../scenario_deeplearning/deeplearning_tests.py    |  21 +--
 .../examples/scenario_zeppelin/zeppelin_tests.py   |   2 +-
 36 files changed, 255 insertions(+), 240 deletions(-)

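For reference, the substitution pattern applied throughout this commit: Fabric 1.x's local(cmd, capture=True) returned the command's stdout as a string, whereas subprocess.run(cmd, shell=True, capture_output=True) returns a CompletedProcess object, so callers that need the output have to read it from .stdout. A minimal sketch of the pattern (assuming Python 3.7+, where capture_output and text are available; the example command is taken from the find_java_path_local() change below):

    import subprocess

    # Old Fabric 1.x style returned stdout directly as a string:
    #   java_path = local("sh -c \"update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'\"", capture=True)

    # subprocess style: capture_output=True fills CompletedProcess.stdout,
    # text=True decodes it to str, and .strip() drops the trailing newline.
    result = subprocess.run(
        "sh -c \"update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'\"",
        shell=True, capture_output=True, text=True)
    java_path = result.stdout.strip()
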
diff --git a/infrastructure-provisioning/scripts/deploy_datalab.py b/infrastructure-provisioning/scripts/deploy_datalab.py
index 8f63fc1..89c130c 100644
--- a/infrastructure-provisioning/scripts/deploy_datalab.py
+++ b/infrastructure-provisioning/scripts/deploy_datalab.py
@@ -23,6 +23,7 @@
 
 import argparse
 import os
+import subprocess
 from fabric import *
 from invoke import task
 
@@ -198,23 +199,23 @@ def generate_docker_command():
 
 def build_docker_images(args):
     # Building base and ssn docker images
-    c.local('cd {2}; sudo docker build --build-arg OS={0} --build-arg SRC_PATH="infrastructure-provisioning/src/" --file '
+    subprocess.run('cd {2}; sudo docker build --build-arg OS={0} --build-arg SRC_PATH="infrastructure-provisioning/src/" --file '
               'infrastructure-provisioning/src/general/files/{1}/'
-              'base_Dockerfile -t docker.datalab-base .'.format(args.conf_os_family, args.conf_cloud_provider, args.workspace_path))
-    c.local('cd {2}; sudo docker build --build-arg OS={0} --file infrastructure-provisioning/src/general/files/{1}/'
-              'ssn_Dockerfile -t docker.datalab-ssn .'.format(args.conf_os_family, args.conf_cloud_provider, args.workspace_path))
+              'base_Dockerfile -t docker.datalab-base .'.format(args.conf_os_family, args.conf_cloud_provider, args.workspace_path), shell=True)
+    subprocess.run('cd {2}; sudo docker build --build-arg OS={0} --file infrastructure-provisioning/src/general/files/{1}/'
+              'ssn_Dockerfile -t docker.datalab-ssn .'.format(args.conf_os_family, args.conf_cloud_provider, args.workspace_path), shell=True)
 
 
 def deploy_datalab(args):
     # Creating SSN node
     docker_command = generate_docker_command()
-    c.local(docker_command)
+    subprocess.run(docker_command, shell=True)
 
 
 def terminate_datalab(args):
     # Dropping datalab environment with selected infrastructure tag
     docker_command = generate_docker_command()
-    c.local(docker_command)
+    subprocess.run(docker_command, shell=True)
 
 
 if __name__ == "__main__":
diff --git a/infrastructure-provisioning/scripts/deploy_repository/scripts/update_amazon_repositories.py b/infrastructure-provisioning/scripts/deploy_repository/scripts/update_amazon_repositories.py
index 7d2ba04..18c9f00 100644
--- a/infrastructure-provisioning/scripts/deploy_repository/scripts/update_amazon_repositories.py
+++ b/infrastructure-provisioning/scripts/deploy_repository/scripts/update_amazon_repositories.py
@@ -36,9 +36,9 @@ if __name__ == "__main__":
     local('wget http://repo.{}.amazonaws.com/2017.09/updates/mirror.list -O /tmp/updates_mirror.list'.format(
         args.region))
     amazon_main_repo = local("cat /tmp/main_mirror.list  | grep {} | sed 's/$basearch//g'".format(args.region),
-                             capture=True)
+                             capture_output=True)
     amazon_updates_repo = local("cat /tmp/updates_mirror.list  | grep {} | sed 's/$basearch//g'".format(args.region),
-                                capture=True)
+                                capture_output=True)
     local('cp -f /opt/nexus/updateRepositories.groovy /tmp/updateRepositories.groovy')
     local('sed -i "s|AMAZON_MAIN_URL|{}|g" /tmp/updateRepositories.groovy'.format(amazon_main_repo))
     local('sed -i "s|AMAZON_UPDATES_URL|{}|g" /tmp/updateRepositories.groovy'.format(amazon_updates_repo))
diff --git a/infrastructure-provisioning/scripts/post-deployment_configuration.py b/infrastructure-provisioning/scripts/post-deployment_configuration.py
index 9780394..1e647a3 100644
--- a/infrastructure-provisioning/scripts/post-deployment_configuration.py
+++ b/infrastructure-provisioning/scripts/post-deployment_configuration.py
@@ -26,6 +26,7 @@ import requests
 import uuid
 from Crypto.PublicKey import RSA
 from fabric import *
+import subprocess
 
 if __name__ == "__main__":
 
@@ -50,139 +51,142 @@ if __name__ == "__main__":
     datalab_ssn_static_ip_name = datalab_sbn + '-ip'
     datalab_zone = requests.get('http://metadata/computeMetadata/v1/instance/zone', headers=headers).text.split('/')[-1]
     datalab_region = '-'.join(datalab_zone.split('-', 2)[:2])
-    deployment_vpcId = local(
+    deployment_vpcId = subprocess.run(
         "sudo gcloud compute instances describe {0} --zone {1} --format 'value(networkInterfaces.network)' | sed 's|.*/||'".format(
-            datalab_sbn, datalab_zone), capture=True)
-    deployment_subnetId = local(
+            datalab_sbn, datalab_zone), capture_output=True, shell=True)
+    deployment_subnetId = subprocess.run(
         "sudo gcloud compute instances describe {0} --zone {1} --format 'value(networkInterfaces.subnetwork)' | sed 's|.*/||'".format(
-            datalab_sbn, datalab_zone), capture=True)
+            datalab_sbn, datalab_zone), capture_output=True, shell=True)
     gcp_projectId = requests.get('http://metadata/computeMetadata/v1/project/project-id', headers=headers).text
     keycloak_redirectUri = 'http://{}'.format(server_external_ip)
 
     print("Generationg SSH keyfile for datalab-user")
     key = RSA.generate(2048)
-    local("sudo sh -c 'echo \"{}\" > /home/datalab-user/keys/KEY-FILE.pem'".format(key.exportKey('PEM')))
-    local("sudo chmod 600 /home/datalab-user/keys/KEY-FILE.pem")
+    subprocess.run("sudo sh -c 'echo \"{}\" > /home/datalab-user/keys/KEY-FILE.pem'".format(key.exportKey('PEM')), shell=True)
+    subprocess.run("sudo chmod 600 /home/datalab-user/keys/KEY-FILE.pem", shell=True)
     pubkey = key.publickey()
-    local("sudo sh -c 'echo \"{}\" > /home/datalab-user/.ssh/authorized_keys'".format(pubkey.exportKey('OpenSSH')))
+    subprocess.run("sudo sh -c 'echo \"{}\" > /home/datalab-user/.ssh/authorized_keys'".format(pubkey.exportKey('OpenSSH')), shell=True)
 
     print("Generationg MongoDB password")
     mongo_pwd = uuid.uuid4().hex
     try:
-        local(
+        subprocess.run(
             "sudo echo -e 'db.changeUserPassword(\"admin\", \"{}\")' | mongo datalabdb --port 27017 -u admin -p MONGO_PASSWORD".format(
-                mongo_pwd))
-        local('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/datalab/conf/billing.yml'.format(mongo_pwd))
+                mongo_pwd), shell=True)
+        subprocess.run('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/datalab/conf/billing.yml'.format(mongo_pwd), shell=True)
 
-        local('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/datalab/conf/ssn.yml'.format(mongo_pwd))
+        subprocess.run('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/datalab/conf/ssn.yml'.format(mongo_pwd), shell=True)
     except:
         print('Mongo password was already changed')
 
     print('Reserving external IP')
-    static_address_exist = local(
-        "sudo gcloud compute addresses list --filter='address={}'".format(server_external_ip), capture=True)
+    static_address_exist = subprocess.run(
+        "sudo gcloud compute addresses list --filter='address={}'".format(server_external_ip), capture_output=True, shell=True)
     if static_address_exist:
         print('Address is already static')
     else:
-        local("sudo gcloud compute addresses create {0} --addresses {1} --region {2}".format(datalab_ssn_static_ip_name,
+        subprocess.run("sudo gcloud compute addresses create {0} --addresses {1} --region {2}".format(datalab_ssn_static_ip_name,
                                                                                              server_external_ip,
                                                                                              datalab_region),
-              capture=True)
+              capture_output=True, shell=True)
 
     print("Overwriting SSN parameters")
 
     if deployment_subnetId == 'default':
-        local(
-            'sudo sed -i "s|# user_subnets_range|user_subnets_range|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini')
-
-    local('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/self-service.yml'.format(datalab_sbn))
-    local('sudo sed -i "s|KEYCLOAK_REDIRECTURI|{}|g" /opt/datalab/conf/self-service.yml'.format(keycloak_redirectUri))
-    local(
-        'sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/conf/self-service.yml'.format(args.keycloak_realm_name))
-    local('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/conf/self-service.yml'.format(
-        args.keycloak_auth_server_url))
-    local('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/conf/self-service.yml'.format(
-        args.keycloak_client_name))
-    local('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/conf/self-service.yml'.format(
-        args.keycloak_client_secret))
-
-    local(
-        'sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_realm_name))
-    local('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/conf/provisioning.yml'.format(
-        args.keycloak_auth_server_url))
-    local('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/conf/provisioning.yml'.format(
-        args.keycloak_client_name))
-    local('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/conf/provisioning.yml'.format(
-        args.keycloak_client_secret))
-    local('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_sbn))
-    local('sudo sed -i "s|SUBNET_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(deployment_subnetId))
-    local('sudo sed -i "s|DATALAB_REGION|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_region))
-    local('sudo sed -i "s|DATALAB_ZONE|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_zone))
-    local('sudo sed -i "s|SSN_VPC_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(deployment_vpcId))
-    local('sudo sed -i "s|GCP_PROJECT_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(gcp_projectId))
-    local('sudo sed -i "s|KEYCLOAK_USER|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_user))
-    local('sudo sed -i "s|KEYCLOAK_ADMIN_PASSWORD|{}|g" /opt/datalab/conf/provisioning.yml'.format(
-        args.keycloak_admin_password))
-
-    local('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/billing.yml'.format(datalab_sbn))
-
-    local(
+        subprocess.run(
+            'sudo sed -i "s|# user_subnets_range|user_subnets_range|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini', shell=True)
+
+    subprocess.run('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/self-service.yml'.format(datalab_sbn), shell=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_REDIRECTURI|{}|g" /opt/datalab/conf/self-service.yml'.format(keycloak_redirectUri), shell=True)
+    subprocess.run(
+        'sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/conf/self-service.yml'.format(args.keycloak_realm_name), shell=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/conf/self-service.yml'.format(
+        args.keycloak_auth_server_url), shell=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/conf/self-service.yml'.format(
+        args.keycloak_client_name), shell=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/conf/self-service.yml'.format(
+        args.keycloak_client_secret), shell=True)
+
+    subprocess.run(
+        'sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_realm_name), shell=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/conf/provisioning.yml'.format(
+        args.keycloak_auth_server_url), shell=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/conf/provisioning.yml'.format(
+        args.keycloak_client_name), shell=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/conf/provisioning.yml'.format(
+        args.keycloak_client_secret), shell=True)
+    subprocess.run('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_sbn), shell=True)
+    subprocess.run('sudo sed -i "s|SUBNET_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(deployment_subnetId), shell=True)
+    subprocess.run('sudo sed -i "s|DATALAB_REGION|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_region), shell=True)
+    subprocess.run('sudo sed -i "s|DATALAB_ZONE|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_zone), shell=True)
+    subprocess.run('sudo sed -i "s|SSN_VPC_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(deployment_vpcId), shell=True)
+    subprocess.run('sudo sed -i "s|GCP_PROJECT_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(gcp_projectId), shell=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_USER|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_user), shell=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_ADMIN_PASSWORD|{}|g" /opt/datalab/conf/provisioning.yml'.format(
+        args.keycloak_admin_password), shell=True)
+
+    subprocess.run('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/billing.yml'.format(datalab_sbn), shell=True)
+
+    subprocess.run(
         'sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            datalab_sbn))
-    local(
+            datalab_sbn), shell=True)
+    subprocess.run(
         'sudo sed -i "s|GCP_PROJECT_ID|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            gcp_projectId))
-    local(
+            gcp_projectId), shell=True)
+    subprocess.run(
         'sudo sed -i "s|DATALAB_REGION|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            datalab_region))
-    local(
+            datalab_region), shell=True)
+    subprocess.run(
         'sudo sed -i "s|DATALAB_ZONE|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            datalab_zone))
-    local(
+            datalab_zone), shell=True)
+    subprocess.run(
         'sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_realm_name))
-    local(
+            args.keycloak_realm_name), shell=True)
+    subprocess.run(
         'sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_auth_server_url))
-    local(
+            args.keycloak_auth_server_url), shell=True)
+    subprocess.run(
         'sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_client_name))
-    local(
+            args.keycloak_client_name), shell=True)
+    subprocess.run(
         'sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_client_secret))
-    local(
+            args.keycloak_client_secret), shell=True)
+    subprocess.run(
         'sudo sed -i "s|KEYCLOAK_USER|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_user))
-    local(
+            args.keycloak_user), shell=True)
+    subprocess.run(
         'sudo sed -i "s|KEYCLOAK_ADMIN_PASSWORD|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_admin_password))
+            args.keycloak_admin_password), shell=True)
 
     print('SSL certificate generating')
     keystore_passwd = uuid.uuid4().hex
-    local('sudo rm /home/datalab-user/keys/ssn*')
-    local('sudo rm /etc/ssl/certs/datalab*')
-    local(
-        'sudo keytool -delete -noprompt -trustcacerts -alias ssn -storepass changeit -keystore /usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts')
-    local(
-        'sudo openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key -out /etc/ssl/certs/datalab.crt -subj "/C=US/ST=US/L=US/O=datalab/CN=localhost/subjectAltName={0}"'.format(
-            server_external_ip))
-    local(
-        'sudo openssl pkcs12 -export -in /etc/ssl/certs/datalab.crt -inkey /etc/ssl/certs/datalab.key -name ssn -out /home/datalab-user/keys/ssn.p12 -password pass:{0}'.format(
-            keystore_passwd))
-    local(
-        'sudo keytool -importkeystore -srckeystore /home/datalab-user/keys/ssn.p12 -srcstoretype PKCS12 -alias ssn -destkeystore /home/datalab-user/keys/ssn.keystore.jks -deststorepass {0} -srcstorepass {0}'.format(
-            keystore_passwd))
-    local(
-        'sudo keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt -storepass changeit -keystore /usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts')
-    local('sudo sed -i "s|KEYSTORE_PASSWORD|{}|g" /opt/datalab/conf/ssn.yml'.format(keystore_passwd))
+    subprocess.run('sudo rm /home/datalab-user/keys/ssn*', shell=True)
+    subprocess.run('sudo rm /etc/ssl/certs/datalab*', shell=True)
+    subprocess.run('sudo keytool -delete -noprompt -trustcacerts -alias ssn -storepass changeit -keystore '
+        '/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts', shell=True)
+    subprocess.run(
+        'sudo openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key -out '
+        '/etc/ssl/certs/datalab.crt -subj "/C=US/ST=US/L=US/O=datalab/CN=localhost/subjectAltName={0}"'.format(
+            server_external_ip), shell=True)
+    subprocess.run(
+        'sudo openssl pkcs12 -export -in /etc/ssl/certs/datalab.crt -inkey /etc/ssl/certs/datalab.key -name ssn -out '
+        '/home/datalab-user/keys/ssn.p12 -password pass:{0}'.format(keystore_passwd), shell=True)
+    subprocess.run(
+        'sudo keytool -importkeystore -srckeystore /home/datalab-user/keys/ssn.p12 -srcstoretype PKCS12 -alias '
+        'ssn -destkeystore /home/datalab-user/keys/ssn.keystore.jks -deststorepass {0} -srcstorepass {0}'.format(
+            keystore_passwd), shell=True)
+    subprocess.run(
+        'sudo keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt -storepass '
+        'changeit -keystore /usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts', shell=True)
+    subprocess.run('sudo sed -i "s|KEYSTORE_PASSWORD|{}|g" /opt/datalab/conf/ssn.yml'.format(keystore_passwd), shell=True)
 
     print('Nginx configuration updating')
-    local('sudo sed -i "s|SERVER_IP|{}|g" /etc/nginx/conf.d/nginx_proxy.conf'.format(server_external_ip))
-    local('sudo systemctl restart nginx')
-    local('sudo supervisorctl restart all')
+    subprocess.run('sudo sed -i "s|SERVER_IP|{}|g" /etc/nginx/conf.d/nginx_proxy.conf'.format(server_external_ip), shell=True)
+    subprocess.run('sudo systemctl restart nginx', shell=True)
+    subprocess.run('sudo supervisorctl restart all', shell=True)
 
     print('Rebuilding docker images')
-    local('cd /opt/datalab/sources/infrastructure-provisioning/src/ && sudo docker-build all')
+    subprocess.run('cd /opt/datalab/sources/infrastructure-provisioning/src/ && sudo docker-build all', shell=True)
 
     print('[SUMMARY]')
     print('Mongo password stored in /opt/datalab/conf/ssn.yml')
diff --git a/infrastructure-provisioning/src/base/scripts/install_user_key.py b/infrastructure-provisioning/src/base/scripts/install_user_key.py
index f9d954a..81a42d0 100644
--- a/infrastructure-provisioning/src/base/scripts/install_user_key.py
+++ b/infrastructure-provisioning/src/base/scripts/install_user_key.py
@@ -37,7 +37,7 @@ args = parser.parse_args()
 
 def copy_key(config):
     admin_key_pub = local('ssh-keygen -y -f {}'.format(args.keyfile),
-                          capture=True)
+                          capture_output=True)
     conn.sudo('rm -f /home/{}/.ssh/authorized_keys'.format(args.user))
     conn.sudo('echo "{0}" >> /home/{1}/.ssh/authorized_keys'.format(admin_key_pub, args.user))
     try:
diff --git a/infrastructure-provisioning/src/general/api/check_inactivity.py b/infrastructure-provisioning/src/general/api/check_inactivity.py
index 04cda89..82f07e9 100644
--- a/infrastructure-provisioning/src/general/api/check_inactivity.py
+++ b/infrastructure-provisioning/src/general/api/check_inactivity.py
@@ -24,7 +24,7 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/configure.py b/infrastructure-provisioning/src/general/api/configure.py
index 046a501..0425f26 100644
--- a/infrastructure-provisioning/src/general/api/configure.py
+++ b/infrastructure-provisioning/src/general/api/configure.py
@@ -24,7 +24,8 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/create.py b/infrastructure-provisioning/src/general/api/create.py
index 43a12f7..9cf4558 100644
--- a/infrastructure-provisioning/src/general/api/create.py
+++ b/infrastructure-provisioning/src/general/api/create.py
@@ -24,7 +24,8 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/create_image.py b/infrastructure-provisioning/src/general/api/create_image.py
index 5804132..3589460 100644
--- a/infrastructure-provisioning/src/general/api/create_image.py
+++ b/infrastructure-provisioning/src/general/api/create_image.py
@@ -24,7 +24,7 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/git_creds.py b/infrastructure-provisioning/src/general/api/git_creds.py
index 9d1bbe9..cf9d0ea 100644
--- a/infrastructure-provisioning/src/general/api/git_creds.py
+++ b/infrastructure-provisioning/src/general/api/git_creds.py
@@ -24,7 +24,7 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/install_libs.py b/infrastructure-provisioning/src/general/api/install_libs.py
index 3a6bcc3..ba86646 100644
--- a/infrastructure-provisioning/src/general/api/install_libs.py
+++ b/infrastructure-provisioning/src/general/api/install_libs.py
@@ -24,7 +24,8 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/list_libs.py b/infrastructure-provisioning/src/general/api/list_libs.py
index 88b955b..7e190a8 100644
--- a/infrastructure-provisioning/src/general/api/list_libs.py
+++ b/infrastructure-provisioning/src/general/api/list_libs.py
@@ -24,7 +24,8 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/reconfigure_spark.py b/infrastructure-provisioning/src/general/api/reconfigure_spark.py
index b023164..82b7f26 100644
--- a/infrastructure-provisioning/src/general/api/reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/api/reconfigure_spark.py
@@ -24,7 +24,8 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/recreate.py b/infrastructure-provisioning/src/general/api/recreate.py
index ae48f51..28eb6ef 100644
--- a/infrastructure-provisioning/src/general/api/recreate.py
+++ b/infrastructure-provisioning/src/general/api/recreate.py
@@ -24,7 +24,7 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/reupload_key.py b/infrastructure-provisioning/src/general/api/reupload_key.py
index 6e2f3df..d77a4e8 100644
--- a/infrastructure-provisioning/src/general/api/reupload_key.py
+++ b/infrastructure-provisioning/src/general/api/reupload_key.py
@@ -24,7 +24,7 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/start.py b/infrastructure-provisioning/src/general/api/start.py
index 4474a67..4a92a2e 100644
--- a/infrastructure-provisioning/src/general/api/start.py
+++ b/infrastructure-provisioning/src/general/api/start.py
@@ -24,7 +24,7 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/status.py b/infrastructure-provisioning/src/general/api/status.py
index 2983035..2bc3ccb 100644
--- a/infrastructure-provisioning/src/general/api/status.py
+++ b/infrastructure-provisioning/src/general/api/status.py
@@ -24,7 +24,7 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/stop.py b/infrastructure-provisioning/src/general/api/stop.py
index a4feb7a..5bd9fe2 100644
--- a/infrastructure-provisioning/src/general/api/stop.py
+++ b/infrastructure-provisioning/src/general/api/stop.py
@@ -24,7 +24,7 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/terminate.py b/infrastructure-provisioning/src/general/api/terminate.py
index df40496..aa975f6 100644
--- a/infrastructure-provisioning/src/general/api/terminate.py
+++ b/infrastructure-provisioning/src/general/api/terminate.py
@@ -24,7 +24,7 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/api/terminate_image.py b/infrastructure-provisioning/src/general/api/terminate_image.py
index 1e7f67f..384211b 100644
--- a/infrastructure-provisioning/src/general/api/terminate_image.py
+++ b/infrastructure-provisioning/src/general/api/terminate_image.py
@@ -24,7 +24,7 @@
 import json
 import os
 import sys
-from fabric import local
+import subprocess
 
 if __name__ == "__main__":
     success = True
diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index 3f31ae1..4fb1f2d 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -1427,11 +1427,11 @@ def install_emr_spark(args):
                             '/tmp/spark.tar.gz')
     s3_client.download_file(args.bucket, args.project_name + '/' + args.cluster_name + '/spark-checksum.chk',
                             '/tmp/spark-checksum.chk')
-    if 'WARNING' in local('md5sum -c /tmp/spark-checksum.chk', capture=True):
+    if 'WARNING' in local('md5sum -c /tmp/spark-checksum.chk', capture_output=True):
         local('rm -f /tmp/spark.tar.gz')
         s3_client.download_file(args.bucket, args.project_name + '/' + args.cluster_name + '/spark.tar.gz',
                                 '/tmp/spark.tar.gz')
-        if 'WARNING' in local('md5sum -c /tmp/spark-checksum.chk', capture=True):
+        if 'WARNING' in local('md5sum -c /tmp/spark-checksum.chk', capture_output=True):
             print("The checksum of spark.tar.gz is mismatched. It could be caused by aws network issue.")
             sys.exit(1)
     local('sudo tar -zhxvf /tmp/spark.tar.gz -C /opt/' + args.emr_version + '/' + args.cluster_name + '/')
@@ -1442,10 +1442,10 @@ def jars(args, emr_dir):
     s3_client = boto3.client('s3', config=Config(signature_version='s3v4'), region_name=args.region)
     s3_client.download_file(args.bucket, 'jars/' + args.emr_version + '/jars.tar.gz', '/tmp/jars.tar.gz')
     s3_client.download_file(args.bucket, 'jars/' + args.emr_version + '/jars-checksum.chk', '/tmp/jars-checksum.chk')
-    if 'WARNING' in local('md5sum -c /tmp/jars-checksum.chk', capture=True):
+    if 'WARNING' in local('md5sum -c /tmp/jars-checksum.chk', capture_output=True):
         local('rm -f /tmp/jars.tar.gz')
         s3_client.download_file(args.bucket, 'jars/' + args.emr_version + '/jars.tar.gz', '/tmp/jars.tar.gz')
-        if 'WARNING' in local('md5sum -c /tmp/jars-checksum.chk', capture=True):
+        if 'WARNING' in local('md5sum -c /tmp/jars-checksum.chk', capture_output=True):
             print("The checksum of jars.tar.gz is mismatched. It could be caused by aws network issue.")
             sys.exit(1)
     local('tar -zhxvf /tmp/jars.tar.gz -C ' + emr_dir)
@@ -1498,8 +1498,8 @@ def get_gitlab_cert(bucket, certfile):
 def create_aws_config_files(generate_full_config=False):
     try:
         aws_user_dir = os.environ['AWS_DIR']
-        logging.info(local("rm -rf " + aws_user_dir + " 2>&1", capture=True))
-        logging.info(local("mkdir -p " + aws_user_dir + " 2>&1", capture=True))
+        logging.info(local("rm -rf " + aws_user_dir + " 2>&1", capture_output=True))
+        logging.info(local("mkdir -p " + aws_user_dir + " 2>&1", capture_output=True))
 
         with open(aws_user_dir + '/config', 'w') as aws_file:
             aws_file.write("[default]\n")
@@ -1511,8 +1511,8 @@ def create_aws_config_files(generate_full_config=False):
                 aws_file.write("aws_access_key_id = {}\n".format(os.environ['aws_access_key']))
                 aws_file.write("aws_secret_access_key = {}\n".format(os.environ['aws_secret_access_key']))
 
-        logging.info(local("chmod 600 " + aws_user_dir + "/*" + " 2>&1", capture=True))
-        logging.info(local("chmod 550 " + aws_user_dir + " 2>&1", capture=True))
+        logging.info(local("chmod 600 " + aws_user_dir + "/*" + " 2>&1", capture_output=True))
+        logging.info(local("chmod 550 " + aws_user_dir + " 2>&1", capture_output=True))
 
         return True
     except Exception as err:
@@ -1736,7 +1736,7 @@ def configure_zeppelin_emr_interpreter(emr_version, cluster_name, region, spark_
         local('sudo service zeppelin-notebook start')
         while not zeppelin_restarted:
             local('sleep 5')
-            result = local('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture=True)
+            result = local('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture_output=True)
             result = result[:1]
             if result == '1':
                 zeppelin_restarted = True
@@ -1745,7 +1745,7 @@ def configure_zeppelin_emr_interpreter(emr_version, cluster_name, region, spark_
         if multiple_emrs == 'true':
             while not port_number_found:
                 port_free = local('sudo bash -c "nmap -p ' + str(default_port) +
-                                  ' localhost | grep closed > /dev/null" ; echo $?', capture=True)
+                                  ' localhost | grep closed > /dev/null" ; echo $?', capture_output=True)
                 port_free = port_free[:1]
                 if port_free == '0':
                     livy_port = default_port
@@ -1819,7 +1819,7 @@ def configure_zeppelin_emr_interpreter(emr_version, cluster_name, region, spark_
 def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_enabled, spark_configs=''):
     local("jar_list=`find {0} -name '*.jar' | tr '\\n' ',' | sed 's/,$//'` ; echo \"spark.jars $jar_list\" >> \
           /tmp/{1}/notebook_spark-defaults_local.conf".format(jars_dir, cluster_name))
-    region = local('curl http://169.254.169.254/latest/meta-data/placement/availability-zone', capture=True)[:-1]
+    region = local('curl http://169.254.169.254/latest/meta-data/placement/availability-zone', capture_output=True)[:-1]
     if region == 'us-east-1':
         endpoint_url = 'https://s3.amazonaws.com'
     elif region == 'cn-north-1':
@@ -1834,7 +1834,7 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
         additional_spark_properties = local('diff --changed-group-format="%>" --unchanged-group-format="" '
                                             '/tmp/{0}/notebook_spark-defaults_local.conf '
                                             '{1}spark/conf/spark-defaults.conf | grep -v "^#"'.format(
-            cluster_name, cluster_dir), capture=True)
+            cluster_name, cluster_dir), capture_output=True)
         for property in additional_spark_properties.split('\n'):
             local('echo "{0}" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(property, cluster_name))
     if os.path.exists('{0}'.format(cluster_dir)):
@@ -1842,10 +1842,10 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
                                                                                                         cluster_dir))
     if spark_configs and os.path.exists('{0}'.format(cluster_dir)):
         datalab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
-                               capture=True)
+                               capture_output=True)
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
-        spark_defaults = local('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture=True)
+        spark_defaults = local('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture_output=True)
         current_spark_properties = spark_defaults.split('\n')
         for param in current_spark_properties:
             if param.split(' ')[0] != '#':
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index e7dc348..15f3937 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -1184,7 +1184,7 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
         additional_spark_properties = local('diff --changed-group-format="%>" --unchanged-group-format="" '
                                             '/tmp/{0}/notebook_spark-defaults_local.conf '
                                             '{1}spark/conf/spark-defaults.conf | grep -v "^#"'.format(
-                                             cluster_name, cluster_dir), capture=True)
+                                             cluster_name, cluster_dir), capture_output=True)
         for property in additional_spark_properties.split('\n'):
             local('echo "{0}" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(property, cluster_name))
     if os.path.exists('{0}'.format(cluster_dir)):
@@ -1196,10 +1196,10 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
         local('cp -f /opt/hadoop/etc/hadoop/core-site.xml {}hadoop/etc/hadoop/core-site.xml'.format(cluster_dir))
     if spark_configs and os.path.exists('{0}'.format(cluster_dir)):
         datalab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
-                               capture=True)
+                               capture_output=True)
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
-        spark_defaults = local('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture=True)
+        spark_defaults = local('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture_output=True)
         current_spark_properties = spark_defaults.split('\n')
         for param in current_spark_properties:
             if param.split(' ')[0] != '#':
@@ -1341,7 +1341,7 @@ def install_dataengine_spark(cluster_name, spark_link, spark_version, hadoop_ver
             local("""echo 'export HADOOP_CLASSPATH="$HADOOP_HOME/share/hadoop/tools/lib/*"' >> {}hadoop/etc/hadoop/hadoop-env.sh""".format(cluster_dir))
             local('echo "export HADOOP_HOME={0}hadoop/" >> {0}spark/conf/spark-env.sh'.format(cluster_dir))
             local('echo "export SPARK_HOME={0}spark/" >> {0}spark/conf/spark-env.sh'.format(cluster_dir))
-            spark_dist_classpath = local('{}hadoop/bin/hadoop classpath'.format(cluster_dir), capture=True)
+            spark_dist_classpath = local('{}hadoop/bin/hadoop classpath'.format(cluster_dir), capture_output=True)
             local('echo "export SPARK_DIST_CLASSPATH={}" >> {}spark/conf/spark-env.sh'.format(
                 spark_dist_classpath, cluster_dir))
     except:
diff --git a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
index fc95c91..0616f89 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
@@ -1094,10 +1094,10 @@ class GCPActions:
         print("Downloading jars...")
         GCPActions().get_from_bucket(args.bucket, 'jars/{0}/jars.tar.gz'.format(args.dataproc_version), '/tmp/jars.tar.gz')
         GCPActions().get_from_bucket(args.bucket, 'jars/{0}/jars-checksum.chk'.format(args.dataproc_version), '/tmp/jars-checksum.chk')
-        if 'WARNING' in local('md5sum -c /tmp/jars-checksum.chk', capture=True):
+        if 'WARNING' in local('md5sum -c /tmp/jars-checksum.chk', capture_output=True):
             local('rm -f /tmp/jars.tar.gz')
             GCPActions().get_from_bucket(args.bucket, 'jars/{0}/jars.tar.gz'.format(args.cluster_name), '/tmp/jars.tar.gz')
-            if 'WARNING' in local('md5sum -c /tmp/jars-checksum.chk', capture=True):
+            if 'WARNING' in local('md5sum -c /tmp/jars-checksum.chk', capture_output=True):
                 print("The checksum of jars.tar.gz is mismatched. It could be caused by gcp network issue.")
                 sys.exit(1)
         local('tar -zhxvf /tmp/jars.tar.gz -C {}'.format(dataproc_dir))
@@ -1117,10 +1117,10 @@ class GCPActions:
         print("Installing spark...")
         GCPActions().get_from_bucket(args.bucket, '{0}/{1}/spark.tar.gz'.format(args.user_name, args.cluster_name), '/tmp/spark.tar.gz')
         GCPActions().get_from_bucket(args.bucket, '{0}/{1}/spark-checksum.chk'.format(args.user_name, args.cluster_name), '/tmp/spark-checksum.chk')
-        if 'WARNING' in local('md5sum -c /tmp/spark-checksum.chk', capture=True):
+        if 'WARNING' in local('md5sum -c /tmp/spark-checksum.chk', capture_output=True):
             local('rm -f /tmp/spark.tar.gz')
             GCPActions().get_from_bucket(args.bucket, '{0}/{1}/spark.tar.gz'.format(args.user_name, args.cluster_name), '/tmp/spark.tar.gz')
-            if 'WARNING' in local('md5sum -c /tmp/spark-checksum.chk', capture=True):
+            if 'WARNING' in local('md5sum -c /tmp/spark-checksum.chk', capture_output=True):
                 print("The checksum of spark.tar.gz is mismatched. It could be caused by gcp network issue.")
                 sys.exit(1)
         local('sudo tar -zhxvf /tmp/spark.tar.gz -C /opt/{0}/{1}/'.format(args.dataproc_version, args.cluster_name))
@@ -1221,7 +1221,7 @@ class GCPActions:
             local('sudo systemctl restart zeppelin-notebook.service')
             while not zeppelin_restarted:
                 local('sleep 5')
-                result = local('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture=True)
+                result = local('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture_output=True)
                 result = result[:1]
                 if result == '1':
                     zeppelin_restarted = True
@@ -1230,7 +1230,7 @@ class GCPActions:
             if multiple_clusters == 'true':
                 while not port_number_found:
                     port_free = local('sudo bash -c "nmap -p ' + str(default_port) +
-                                      ' localhost | grep closed > /dev/null" ; echo $?', capture=True)
+                                      ' localhost | grep closed > /dev/null" ; echo $?', capture_output=True)
                     port_free = port_free[:1]
                     if port_free == '0':
                         livy_port = default_port
@@ -1520,7 +1520,7 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
         additional_spark_properties = local('diff --changed-group-format="%>" --unchanged-group-format="" '
                                             '/tmp/{0}/notebook_spark-defaults_local.conf '
                                             '{1}spark/conf/spark-defaults.conf | grep -v "^#"'.format(
-                                             cluster_name, cluster_dir), capture=True)
+                                             cluster_name, cluster_dir), capture_output=True)
         for property in additional_spark_properties.split('\n'):
             local('echo "{0}" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(property, cluster_name))
     if os.path.exists('{0}'.format(cluster_dir)):
@@ -1529,10 +1529,10 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
     local('cp -f /opt/spark/conf/core-site.xml {}spark/conf/'.format(cluster_dir))
     if spark_configs and os.path.exists('{0}'.format(cluster_dir)):
         datalab_header = local('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
-                               capture=True)
+                               capture_output=True)
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
-        spark_defaults = local('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture=True)
+        spark_defaults = local('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture_output=True)
         current_spark_properties = spark_defaults.split('\n')
         for param in current_spark_properties:
             if param.split(' ')[0] != '#':
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
index c7f3359..c228e7d 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
@@ -26,6 +26,7 @@ from fabric.contrib.files import exists
 import sys
 import os
 import time
+import subprocess
 
 
 def manage_pkg(command, environment, requisites):
@@ -84,12 +85,12 @@ def manage_pkg(command, environment, requisites):
                             traceback.print_exc()
                             append_result("Failed to manage_pkgs", str(err))
                 elif environment == 'local':
-                    if local('sudo pgrep "^apt" -a && echo "busy" || echo "ready"', capture=True) == 'busy':
+                    if subprocess.run('sudo pgrep "^apt" -a && echo "busy" || echo "ready"', capture_output=True, shell=True) == 'busy':
                         counter += 1
                         time.sleep(10)
                     else:
                         allow = True
-                        local('sudo apt-get {0} {1}'.format(command, requisites), capture=True)
+                        subprocess.run('sudo apt-get {0} {1}'.format(command, requisites), capture_output=True, shell=True)
                 else:
                     print('Wrong environment')
     except:
@@ -154,7 +155,7 @@ def find_java_path_remote():
 
 
 def find_java_path_local():
-    java_path = local("sh -c \"update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'\"", capture=True)
+    java_path = subprocess.run("sh -c \"update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'\"", capture_output=True, shell=True)
     return java_path
 
 
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
index bd96aa2..de5441b 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
@@ -44,12 +44,12 @@ def manage_pkg(command, environment, requisites):
                         allow = True
                         conn.sudo('yum {0} {1}'.format(command, requisites))
                 elif environment == 'local':
-                    if local('sudo pgrep yum -a && echo "busy" || echo "ready"', capture=True) == 'busy':
+                    if local('sudo pgrep yum -a && echo "busy" || echo "ready"', capture_output=True) == 'busy':
                         counter += 1
                         time.sleep(10)
                     else:
                         allow = True
-                        local('sudo yum {0} {1}'.format(command, requisites), capture=True)
+                        local('sudo yum {0} {1}'.format(command, requisites), capture_output=True)
                 else:
                     print('Wrong environment')
     except:
@@ -100,7 +100,7 @@ def find_java_path_remote():
 
 
 def find_java_path_local():
-    java_path = local("alternatives --display java | grep 'slave jre: ' | awk '{print $3}'", capture=True)
+    java_path = local("alternatives --display java | grep 'slave jre: ' | awk '{print $3}'", capture_output=True)
     return java_path
 
 
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
index e3b3790..fb3113f 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
@@ -291,16 +291,16 @@ def install_livy_dependencies(os_user):
 
 def install_maven_emr(os_user):
     if not os.path.exists('/home/' + os_user + '/.ensure_dir/maven_ensured'):
-        local('wget http://apache.volia.net/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz -O /tmp/maven.tar.gz')
-        local('sudo tar -zxvf /tmp/maven.tar.gz -C /opt/')
-        local('sudo ln -fs /opt/apache-maven-3.3.9/bin/mvn /usr/bin/mvn')
-        local('touch /home/' + os_user + '/.ensure_dir/maven_ensured')
+        subprocess.run('wget http://apache.volia.net/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz -O /tmp/maven.tar.gz', shell=True)
+        subprocess.run('sudo tar -zxvf /tmp/maven.tar.gz -C /opt/', shell=True)
+        subprocess.run('sudo ln -fs /opt/apache-maven-3.3.9/bin/mvn /usr/bin/mvn', shell=True)
+        subprocess.run('touch /home/' + os_user + '/.ensure_dir/maven_ensured', shell=True)
 
 
 def install_livy_dependencies_emr(os_user):
     if not os.path.exists('/home/' + os_user + '/.ensure_dir/livy_dependencies_ensured'):
-        local('sudo -i pip3.5 install cloudpickle requests requests-kerberos flake8 flaky pytest --no-cache-dir')
-        local('touch /home/' + os_user + '/.ensure_dir/livy_dependencies_ensured')
+        subprocess.run('sudo -i pip3.5 install cloudpickle requests requests-kerberos flake8 flaky pytest --no-cache-dir', shell=True)
+        subprocess.run('touch /home/' + os_user + '/.ensure_dir/livy_dependencies_ensured', shell=True)
 
 
 def install_nodejs(os_user):
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
index 61e954e..7a1a1a2 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
@@ -160,7 +160,7 @@ def add_breeze_library_emr(args):
 def install_sparkamagic_kernels(args):
     try:
         local('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension')
-        sparkmagic_dir = local("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture=True)
+        sparkmagic_dir = local("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True)
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir))
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir))
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkrkernel --user'.format(sparkmagic_dir))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py
index 0a33898..f915706 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py
@@ -109,7 +109,7 @@ def toree_kernel(args):
 def install_sparkamagic_kernels(args):
     try:
         local('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension')
-        sparkmagic_dir = local("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture=True)
+        sparkmagic_dir = local("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True)
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir))
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir))
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkrkernel --user'.format(sparkmagic_dir))
diff --git a/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py
index acdb889..bdbf328 100644
--- a/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py
@@ -97,7 +97,7 @@ def pyspark_kernel(args):
 def install_sparkamagic_kernels(args):
     try:
         local('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension')
-        sparkmagic_dir = local("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture=True)
+        sparkmagic_dir = local("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True)
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir))
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir))
 
@@ -105,7 +105,7 @@ def install_sparkamagic_kernels(args):
                                                                          args.cluster_name)
         local('sed -i \'s|PySpark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/pysparkkernel/kernel.json'.format(
             pyspark_kernel_name, args.os_user))
-        scala_version = local('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture=True)
+        scala_version = local('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True)
         spark_kernel_name = 'Spark (Scala-{0} / Spark-{1} ) [{2}]'.format(scala_version, args.spark_version,
                                                                          args.cluster_name)
         local('sed -i \'s|Spark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkkernel/kernel.json'.format(
diff --git a/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py b/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
index cdb7a66..402d2ea 100644
--- a/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
@@ -24,6 +24,7 @@
 import argparse
 import json
 import sys
+import time
 import xmlrpc.client
 from datalab.fab import *
 from datalab.notebook_lib import *
@@ -49,7 +50,7 @@ def get_available_pip_pkgs(version):
                     pip_pkgs[pkg] = "N/A"
                 return pip_pkgs
             else:
-                local('sleep 5')
+                time.sleep(5)
                 continue
     except Exception as err:
         print('Error: {0}'.format(err))
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
index f7ed852..0bea54e 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
@@ -74,7 +74,7 @@ def r_kernel(args):
 
 def toree_kernel(args):
     spark_path = '/opt/' + args.cluster_name + '/spark/'
-    scala_version = local('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture=True)
+    scala_version = local('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True)
     local('mkdir -p ' + kernels_dir + 'toree_' + args.cluster_name + '/')
     local('tar zxvf /tmp/{}/toree_kernel.tar.gz -C '.format(args.cluster_name) + kernels_dir + 'toree_' + args.cluster_name + '/')
     local('sudo mv {0}toree_{1}/toree-0.3.0-incubating/* {0}toree_{1}/'.format(kernels_dir, args.cluster_name))
@@ -152,7 +152,7 @@ def pyspark_kernel(args):
 def install_sparkamagic_kernels(args):
     try:
         local('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension')
-        sparkmagic_dir = local("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture=True)
+        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True).stdout.decode('utf-8').strip()
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir))
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir))
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkrkernel --user'.format(sparkmagic_dir))
@@ -160,12 +160,12 @@ def install_sparkamagic_kernels(args):
                                                                          args.cluster_name)
         local('sed -i \'s|PySpark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/pysparkkernel/kernel.json'.format(
             pyspark_kernel_name, args.os_user))
-        scala_version = local('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture=True)
+        scala_version = subprocess.run('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True, shell=True).stdout.decode('utf-8').strip()
         spark_kernel_name = 'Spark (Scala-{0} / Spark-{1} ) [{2}]'.format(scala_version, args.spark_version,
                                                                          args.cluster_name)
         local('sed -i \'s|Spark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkkernel/kernel.json'.format(
             spark_kernel_name, args.os_user))
-        r_version = local("R --version | awk '/version / {print $3}'", capture=True)
+        r_version = subprocess.run("R --version | awk '/version / {print $3}'", capture_output=True, shell=True).stdout.decode('utf-8').strip()
         sparkr_kernel_name = 'SparkR (R-{0} / Spark-{1} ) [{2}]'.format(str(r_version), args.spark_version,
                                                                             args.cluster_name)
         local('sed -i \'s|SparkR|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkrkernel/kernel.json'.format(
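
The sed -i calls above patch the display name inside each kernel.json in place. The same edit can be expressed with the json module, which sidesteps shell quoting in the substitution pattern; this is only an alternative sketch, not what the commit does:

    import json

    def set_kernel_display_name(kernel_json_path, display_name):
        # Load the kernelspec, replace its display_name, and write it back.
        with open(kernel_json_path) as f:
            spec = json.load(f)
        spec['display_name'] = display_name
        with open(kernel_json_path, 'w') as f:
            json.dump(spec, f, indent=2)
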
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py
index 38382c6..a0f0f4f 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py
@@ -97,7 +97,7 @@ def pyspark_kernel(args):
 def install_sparkamagic_kernels(args):
     try:
         local('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension')
-        sparkmagic_dir = local("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture=True)
+        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True).stdout.decode('utf-8').strip()
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir))
         local('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir))
 
@@ -105,7 +105,7 @@ def install_sparkamagic_kernels(args):
                                                                          args.cluster_name)
         local('sed -i \'s|PySpark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/pysparkkernel/kernel.json'.format(
             pyspark_kernel_name, args.os_user))
-        scala_version = local('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture=True)
+        scala_version = subprocess.run('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True, shell=True).stdout.decode('utf-8').strip()
         spark_kernel_name = 'Spark (Scala-{0} / Spark-{1} ) [{2}]'.format(scala_version, args.spark_version,
                                                                          args.cluster_name)
         local('sed -i \'s|Spark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkkernel/kernel.json'.format(
diff --git a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
index e71db17..3fa8f30 100644
--- a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
@@ -24,6 +24,7 @@
 import argparse
 import os
 import sys
+import subprocess
 from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
@@ -61,38 +62,38 @@ def configure_zeppelin_dataengine_interpreter(cluster_name, cluster_dir, os_user
         default_port = 8998
         livy_port = ''
         livy_path = '/opt/' + cluster_name + '/livy/'
-        local('echo \"Configuring Data Engine path for Zeppelin\"')
-        local('sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/' + cluster_name +
-              '\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh')
-        local('sudo chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/')
-        local('sudo systemctl daemon-reload')
-        local('sudo service zeppelin-notebook stop')
-        local('sudo service zeppelin-notebook start')
+        subprocess.run('echo \"Configuring Data Engine path for Zeppelin\"', shell=True)
+        subprocess.run('sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/' + cluster_name +
+              '\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh', shell=True)
+        subprocess.run('sudo chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/', shell=True)
+        subprocess.run('sudo systemctl daemon-reload', shell=True)
+        subprocess.run('sudo service zeppelin-notebook stop', shell=True)
+        subprocess.run('sudo service zeppelin-notebook start', shell=True)
         while not zeppelin_restarted:
-            local('sleep 5')
-            result = local('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture=True)
+            subprocess.run('sleep 5', shell=True)
+            result = subprocess.run('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True)
             result = result[:1]
             if result == '1':
                 zeppelin_restarted = True
-        local('sleep 5')
-        local('echo \"Configuring Data Engine spark interpreter for Zeppelin\"')
+        subprocess.run('sleep 5', shell=True)
+        subprocess.run('echo \"Configuring Data Engine spark interpreter for Zeppelin\"', shell=True)
         if multiple_clusters == 'true':
             while not port_number_found:
-                port_free = local('sudo bash -c "nmap -p ' + str(default_port) +
-                                  ' localhost | grep closed > /dev/null" ; echo $?', capture=True)
+                port_free = subprocess.run('sudo bash -c "nmap -p ' + str(default_port) +
+                                  ' localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True)
                 port_free = port_free[:1]
                 if port_free == '0':
                     livy_port = default_port
                     port_number_found = True
                 else:
                     default_port += 1
-            local('sudo echo "livy.server.port = ' + str(livy_port) + '" >> ' + livy_path + 'conf/livy.conf')
-            local('sudo echo "livy.spark.master = ' + spark_master + '" >> ' + livy_path + 'conf/livy.conf')
+            subprocess.run('sudo echo "livy.server.port = ' + str(livy_port) + '" >> ' + livy_path + 'conf/livy.conf', shell=True)
+            subprocess.run('sudo echo "livy.spark.master = ' + spark_master + '" >> ' + livy_path + 'conf/livy.conf', shell=True)
             if os.path.exists(livy_path + 'conf/spark-blacklist.conf'):
-                local('sudo sed -i "s/^/#/g" ' + livy_path + 'conf/spark-blacklist.conf')
-            local(''' sudo echo "export SPARK_HOME=''' + cluster_dir + '''spark/" >> ''' + livy_path + '''conf/livy-env.sh''')
-            local(''' sudo echo "export PYSPARK3_PYTHON=python3.8" >> ''' +
-                  livy_path + '''conf/livy-env.sh''')
+                subprocess.run('sudo sed -i "s/^/#/g" ' + livy_path + 'conf/spark-blacklist.conf', shell=True)
+            subprocess.run(''' sudo echo "export SPARK_HOME=''' + cluster_dir + '''spark/" >> ''' + livy_path + '''conf/livy-env.sh''', shell=True)
+            subprocess.run(''' sudo echo "export PYSPARK3_PYTHON=python3.8" >> ''' +
+                  livy_path + '''conf/livy-env.sh''', shell=True)
             template_file = "/tmp/{}/dataengine_interpreter.json".format(args.cluster_name)
             fr = open(template_file, 'r+')
             text = fr.read()
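
With capture_output=True, subprocess.run returns a CompletedProcess, so the `result[:1]` slice kept above has to be taken from the decoded stdout rather than from the object itself. A small sketch of the nmap port probe under that assumption (the function name is illustrative):

    import subprocess

    def port_is_free(port):
        # nmap reports "closed" when nothing listens; `echo $?` yields '0' in that case.
        result = subprocess.run(
            'sudo bash -c "nmap -p {} localhost | grep closed > /dev/null" ; echo $?'.format(port),
            capture_output=True, shell=True)
        return result.stdout.decode('utf-8').strip() == '0'
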
@@ -105,21 +106,21 @@ def configure_zeppelin_dataengine_interpreter(cluster_name, cluster_dir, os_user
             fw.close()
             for _ in range(5):
                 try:
-                    local("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d " +
-                          "@/tmp/{}/dataengine_interpreter.json http://localhost:8080/api/interpreter/setting".format(args.cluster_name))
+                    subprocess.run("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d " +
+                          "@/tmp/{}/dataengine_interpreter.json http://localhost:8080/api/interpreter/setting".format(args.cluster_name), shell=True)
                     break
                 except:
-                    local('sleep 5')
-            local('sudo cp /opt/livy-server-cluster.service /etc/systemd/system/livy-server-' + str(livy_port) +
-                  '.service')
-            local("sudo sed -i 's|OS_USER|" + os_user + "|' /etc/systemd/system/livy-server-" + str(livy_port) +
-                  '.service')
-            local("sudo sed -i 's|LIVY_PATH|" + livy_path + "|' /etc/systemd/system/livy-server-" + str(livy_port)
-                  + '.service')
-            local('sudo chmod 644 /etc/systemd/system/livy-server-' + str(livy_port) + '.service')
-            local("sudo systemctl daemon-reload")
-            local("sudo systemctl enable livy-server-" + str(livy_port))
-            local('sudo systemctl start livy-server-' + str(livy_port))
+                    subprocess.run('sleep 5', shell=True)
+            subprocess.run('sudo cp /opt/livy-server-cluster.service /etc/systemd/system/livy-server-' + str(livy_port) +
+                  '.service', shell=True)
+            subprocess.run("sudo sed -i 's|OS_USER|" + os_user + "|' /etc/systemd/system/livy-server-" + str(livy_port) +
+                  '.service', shell=True)
+            subprocess.run("sudo sed -i 's|LIVY_PATH|" + livy_path + "|' /etc/systemd/system/livy-server-" + str(livy_port)
+                  + '.service', shell=True)
+            subprocess.run('sudo chmod 644 /etc/systemd/system/livy-server-' + str(livy_port) + '.service', shell=True)
+            subprocess.run("sudo systemctl daemon-reload", shell=True)
+            subprocess.run("sudo systemctl enable livy-server-" + str(livy_port), shell=True)
+            subprocess.run('sudo systemctl start livy-server-' + str(livy_port), shell=True)
         else:
             template_file = "/tmp/{}/dataengine_interpreter.json".format(args.cluster_name)
             p_versions = ["2", "3.8"]
@@ -137,32 +138,32 @@ def configure_zeppelin_dataengine_interpreter(cluster_name, cluster_dir, os_user
                 fw.close()
                 for _ in range(5):
                     try:
-                        local("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d " +
+                        subprocess.run("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d " +
                               "@/tmp/dataengine_spark_py" + p_version +
-                              "_interpreter.json http://localhost:8080/api/interpreter/setting")
+                              "_interpreter.json http://localhost:8080/api/interpreter/setting", shell=True)
                         break
                     except:
-                        local('sleep 5')
-        local('touch /home/' + os_user + '/.ensure_dir/dataengine_' + cluster_name + '_interpreter_ensured')
+                        subprocess.run('sleep 5', shell=True)
+        subprocess.run('touch /home/' + os_user + '/.ensure_dir/dataengine_' + cluster_name + '_interpreter_ensured', shell=True)
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
 
 
 def install_remote_livy(args):
-    local('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/')
-    local('sudo service zeppelin-notebook stop')
-    local('sudo -i wget http://archive.cloudera.com/beta/livy/livy-server-' + args.livy_version + '.zip -O /opt/' +
-          args.cluster_name + '/livy-server-' + args.livy_version + '.zip')
-    local('sudo unzip /opt/' + args.cluster_name + '/livy-server-' + args.livy_version + '.zip -d /opt/' +
-          args.cluster_name + '/')
-    local('sudo mv /opt/' + args.cluster_name + '/livy-server-' + args.livy_version + '/ /opt/' + args.cluster_name +
-          '/livy/')
+    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/', shell=True)
+    subprocess.run('sudo service zeppelin-notebook stop', shell=True)
+    subprocess.run('sudo -i wget http://archive.cloudera.com/beta/livy/livy-server-' + args.livy_version + '.zip -O /opt/' +
+          args.cluster_name + '/livy-server-' + args.livy_version + '.zip', shell=True)
+    subprocess.run('sudo unzip /opt/' + args.cluster_name + '/livy-server-' + args.livy_version + '.zip -d /opt/' +
+          args.cluster_name + '/', shell=True)
+    subprocess.run('sudo mv /opt/' + args.cluster_name + '/livy-server-' + args.livy_version + '/ /opt/' + args.cluster_name +
+          '/livy/', shell=True)
     livy_path = '/opt/' + args.cluster_name + '/livy/'
-    local('sudo mkdir -p ' + livy_path + '/logs')
-    local('sudo mkdir -p /var/run/livy')
-    local('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /var/run/livy')
-    local('sudo chown ' + args.os_user + ':' + args.os_user + ' -R ' + livy_path)
+    subprocess.run('sudo mkdir -p ' + livy_path + '/logs', shell=True)
+    subprocess.run('sudo mkdir -p /var/run/livy', shell=True)
+    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /var/run/livy', shell=True)
+    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R ' + livy_path, shell=True)
 
 
 if __name__ == "__main__":
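
One caveat with the `sudo echo ... >> livy.conf` appends earlier in this file: the redirection is performed by the invoking shell, not under sudo, so it only succeeds where that user can already write. The usual append-as-root shape goes through tee; a sketch:

    import subprocess

    def append_as_root(line, path):
        # tee runs under sudo, while echo and the pipe stay in the caller's shell.
        subprocess.run('echo "{}" | sudo tee -a {} > /dev/null'.format(line, path),
                       shell=True, check=True)
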
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
index b920aac..5be39df 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
@@ -27,6 +27,7 @@ import logging
 import os
 import sys
 import traceback
+import subprocess
 from datalab.fab import *
 from datalab.ssn_lib import *
 from fabric import *
@@ -92,7 +93,7 @@ def copy_ssn_libraries():
     try:
         conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
         conn.run('mkdir -p /tmp/datalab_libs/')
-        local('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, host_string))
+        subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, host_string), shell=True)
         conn.run('chmod a+x /tmp/datalab_libs/*')
         conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
         if exists('/usr/lib64'):
@@ -108,27 +109,27 @@ def configure_mongo(mongo_passwd, default_endpoint_name):
     try:
         if not exists("/lib/systemd/system/mongod.service"):
             if os.environ['conf_os_family'] == 'debian':
-                local('sed -i "s/MONGO_USR/mongodb/g" /root/templates/mongod.service_template')
+                subprocess.run('sed -i "s/MONGO_USR/mongodb/g" /root/templates/mongod.service_template', shell=True)
             elif os.environ['conf_os_family'] == 'redhat':
-                local('sed -i "s/MONGO_USR/mongod/g" /root/templates/mongod.service_template')
-            local('scp -i {} /root/templates/mongod.service_template {}:/tmp/mongod.service'.format(args.keyfile,
-                                                                                                    host_string))
+                subprocess.run('sed -i "s/MONGO_USR/mongod/g" /root/templates/mongod.service_template', shell=True)
+            subprocess.run('scp -i {} /root/templates/mongod.service_template {}:/tmp/mongod.service'.format(args.keyfile,
+                                                                                                    host_string), shell=True)
             conn.sudo('mv /tmp/mongod.service /lib/systemd/system/mongod.service')
             conn.sudo('systemctl daemon-reload')
             conn.sudo('systemctl enable mongod.service')
-        local('sed -i "s|PASSWORD|{}|g" /root/scripts/resource_status.py'.format(mongo_passwd))
-        local('scp -i {} /root/scripts/resource_status.py {}:/tmp/resource_status.py'.format(args.keyfile,
-                                                                                             host_string))
+        subprocess.run('sed -i "s|PASSWORD|{}|g" /root/scripts/resource_status.py'.format(mongo_passwd), shell=True)
+        subprocess.run('scp -i {} /root/scripts/resource_status.py {}:/tmp/resource_status.py'.format(args.keyfile,
+                                                                                             host_string), shell=True)
         conn.sudo('mv /tmp/resource_status.py ' + os.environ['ssn_datalab_path'] + 'tmp/')
-        local('sed -i "s|PASSWORD|{}|g" /root/scripts/configure_mongo.py'.format(mongo_passwd))
-        local('scp -i {} /root/scripts/configure_mongo.py {}:/tmp/configure_mongo.py'.format(args.keyfile,
-                                                                                             host_string))
+        subprocess.run('sed -i "s|PASSWORD|{}|g" /root/scripts/configure_mongo.py'.format(mongo_passwd), shell=True)
+        subprocess.run('scp -i {} /root/scripts/configure_mongo.py {}:/tmp/configure_mongo.py'.format(args.keyfile,
+                                                                                             host_string), shell=True)
         conn.sudo('mv /tmp/configure_mongo.py ' + args.datalab_path + 'tmp/')
-        local('scp -i {} /root/files/{}/mongo_roles.json {}:/tmp/mongo_roles.json'.format(args.keyfile,
+        subprocess.run('scp -i {} /root/files/{}/mongo_roles.json {}:/tmp/mongo_roles.json'.format(args.keyfile,
                                                                                           args.cloud_provider,
-                                                                                          host_string))
-        local('scp -i {} /root/files/local_endpoint.json {}:/tmp/local_endpoint.json'.format(args.keyfile,
-                                                                                             host_string))
+                                                                                          host_string), shell=True)
+        subprocess.run('scp -i {} /root/files/local_endpoint.json {}:/tmp/local_endpoint.json'.format(args.keyfile,
+                                                                                             host_string), shell=True)
         conn.sudo('mv /tmp/mongo_roles.json ' + args.datalab_path + 'tmp/')
         conn.sudo('sed -i "s|DEF_ENDPOINT_NAME|{0}|g" /tmp/local_endpoint.json'.format(default_endpoint_name))
         conn.sudo('sed -i "s|CLOUD_PROVIDER|{0}|g" /tmp/local_endpoint.json'.format(
diff --git a/infrastructure-provisioning/src/ssn/scripts/docker_build.py b/infrastructure-provisioning/src/ssn/scripts/docker_build.py
index 9347d6b..2cff8df 100644
--- a/infrastructure-provisioning/src/ssn/scripts/docker_build.py
+++ b/infrastructure-provisioning/src/ssn/scripts/docker_build.py
@@ -48,13 +48,13 @@ else:
 
 def image_build(src_path, node):
     try:
-        if local("cat /etc/lsb-release | grep DISTRIB_ID | awk -F '=' '{print $2}'", capture=True).stdout == 'Ubuntu':
+        if local("cat /etc/lsb-release | grep DISTRIB_ID | awk -F '=' '{print $2}'", capture_output=True).stdout == 'Ubuntu':
             os_family = 'debian'
         else:
             os_family = 'redhat'
-        if local("uname -r | awk -F '-' '{print $3}'", capture=True).stdout == 'aws':
+        if local("uname -r | awk -F '-' '{print $3}'", capture_output=True).stdout == 'aws':
             cloud_provider = 'aws'
-        elif local("uname -r | awk -F '-' '{print $3}'", capture=True).stdout == 'azure':
+        elif local("uname -r | awk -F '-' '{print $3}'", capture_output=True).stdout == 'azure':
             cloud_provider = 'azure'
             if not exists('{}base/azure_auth.json'.format(src_path)):
                 local('cp /home/datalab-user/keys/azure_auth.json {}base/azure_auth.json'.format(src_path))
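
The comparisons above capture text with subprocess.run; decoding (or text=True on Python 3.7+) plus strip() is what keeps checks like == 'Ubuntu' working once the trailing newline is removed. A sketch, assuming this module imports subprocess; the helper name is illustrative:

    import subprocess

    def shell_value(cmd):
        # Capture a command's stdout as a stripped str (text=True decodes for us).
        return subprocess.run(cmd, capture_output=True, shell=True, text=True).stdout.strip()

    os_family = 'debian' if shell_value("cat /etc/lsb-release | grep DISTRIB_ID | awk -F '=' '{print $2}'") == 'Ubuntu' else 'redhat'
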
diff --git a/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py b/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py
index c862814..af038af 100644
--- a/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py
+++ b/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py
@@ -24,6 +24,7 @@
 import os, sys, json
 from fabric import *
 import argparse
+import subprocess
 
 
 parser = argparse.ArgumentParser()
@@ -38,17 +39,17 @@ args = parser.parse_args()
 
 def prepare_templates():
     try:
-        local('/bin/bash -c "source /etc/profile && wget http://files.fast.ai/data/dogscats.zip -O /tmp/dogscats.zip"')
-        local('unzip -q /tmp/dogscats.zip -d /tmp')
-        local('/bin/bash -c "mkdir -p /home/{0}/{1}"'.format(args.os_user, "{test,train}"))
-        local('mv /tmp/dogscats/test1/* /home/{0}/test'.format(args.os_user))
-        local('/bin/bash -c "mv /tmp/dogscats/valid/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user))
-        local('/bin/bash -c "mv /tmp/dogscats/train/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user))
+        subprocess.run('/bin/bash -c "source /etc/profile && wget http://files.fast.ai/data/dogscats.zip -O /tmp/dogscats.zip"', shell=True)
+        subprocess.run('unzip -q /tmp/dogscats.zip -d /tmp', shell=True)
+        subprocess.run('/bin/bash -c "mkdir -p /home/{0}/{1}"'.format(args.os_user, "{test,train}"), shell=True)
+        subprocess.run('mv /tmp/dogscats/test1/* /home/{0}/test'.format(args.os_user), shell=True)
+        subprocess.run('/bin/bash -c "mv /tmp/dogscats/valid/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True)
+        subprocess.run('/bin/bash -c "mv /tmp/dogscats/train/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True)
     except Exception as err:
         print('Failed to download/unpack image dataset!', str(err))
         sys.exit(1)
-    local('mkdir -p /home/{0}/logs'.format(args.os_user))
-    local('mv /tmp/deeplearning /home/{0}/test_templates'.format(args.os_user))
+    subprocess.run('mkdir -p /home/{0}/logs'.format(args.os_user), shell=True)
+    subprocess.run('mv /tmp/deeplearning /home/{0}/test_templates'.format(args.os_user), shell=True)
 
 def get_storage():
     storages = {"aws": args.storage,
@@ -68,8 +69,8 @@ def prepare_ipynb(kernel_name, template_path, ipynb_name):
         f.write(text)
 
 def run_ipynb(ipynb_name):
-    local('export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64:/usr/lib64/openmpi/lib; ' \
-            'jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name))
+    subprocess.run('export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64:/usr/lib64/openmpi/lib; ' \
+            'jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name), shell=True)
 
 def run_tensor():
     interpreters = ['pyspark_local']
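
In prepare_templates the try/except only fires if subprocess.run raises; without check=True a failed wget or unzip returns normally and the "Failed to download/unpack" branch never runs. A sketch of the guarded call (the helper name is illustrative):

    import subprocess
    import sys

    def run_or_exit(cmd, message):
        # check=True raises CalledProcessError on a non-zero exit,
        # so a failed download or unzip actually stops the test run.
        try:
            subprocess.run(cmd, shell=True, check=True)
        except subprocess.CalledProcessError as err:
            print(message, str(err))
            sys.exit(1)
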
diff --git a/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py b/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py
index 18919be..ff43d7d 100644
--- a/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py
+++ b/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py
@@ -168,7 +168,7 @@ def run_spark():
 
 if __name__ == "__main__":
     try:
-        notebook_ip = local('hostname -I', capture=True)
+        notebook_ip = subprocess.run('hostname -I', capture_output=True, shell=True).stdout.decode('utf-8').strip()
         prepare_templates()
         run_pyspark()
         run_sparkr()
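
hostname -I prints one or more addresses followed by a newline, so the captured value is typically decoded and the first entry taken. A small sketch consistent with the conversion above, assuming subprocess is imported in this test module:

    import subprocess

    def first_host_ip():
        # Take the first address reported by `hostname -I`.
        out = subprocess.run('hostname -I', capture_output=True, shell=True)
        return out.stdout.decode('utf-8').split()[0]
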


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org