Posted to commits@datalab.apache.org by lf...@apache.org on 2021/02/08 10:52:36 UTC

[incubator-datalab] branch DATALAB-2091-fab2 updated (2b65f72 -> 5d8eb01)

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a change to branch DATALAB-2091-fab2
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git.


    from 2b65f72  [DATALAB-2091]: partially replaced local with subprocess.run(
     new a92b4a1  [DATALAB-2091]: partially replaced local with subprocess.run(
     new 5d8eb01  [DATALAB-2091]: partially replaced local with subprocess.run(

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../src/general/scripts/aws/edge_configure.py      | 15 +++----
 .../src/general/scripts/aws/zeppelin_configure.py  | 17 ++++----
 .../scripts/azure/common_prepare_notebook.py       |  3 +-
 .../src/general/scripts/azure/jupyter_configure.py | 23 +++++-----
 .../src/general/scripts/azure/tensor_configure.py  | 21 +++++-----
 .../general/scripts/azure/zeppelin_configure.py    | 21 +++++-----
 infrastructure-provisioning/src/jupyter/fabfile.py | 29 ++++++-------
 .../src/jupyterlab/fabfile.py                      | 27 ++++++------
 .../src/project/scripts/reupload_ssh_key.py        |  3 +-
 infrastructure-provisioning/src/rstudio/fabfile.py | 29 ++++++-------
 .../src/ssn/scripts/configure_gitlab.py            | 49 +++++++++++-----------
 .../src/ssn/scripts/restore.py                     | 41 +++++++++---------
 .../src/tensor-rstudio/fabfile.py                  | 27 ++++++------
 infrastructure-provisioning/src/tensor/fabfile.py  | 27 ++++++------
 integration-tests/examples/copy_files.py           |  7 ++--
 .../examples/scenario_jupyter/jupyter_tests.py     |  6 +--
 16 files changed, 180 insertions(+), 165 deletions(-)
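
The change set summarized above is mechanical: calls to Fabric 1's local() are swapped for subprocess.run(..., shell=True). One behavioural difference worth keeping in mind (a minimal sketch, not part of the patch): local() aborts on a non-zero exit status by default, while subprocess.run() only raises when check=True is passed, so the surrounding try/except blocks will not see a failing command unless that flag, or an explicit return-code check, is added.

    import subprocess

    params = "--uuid 1234"  # illustrative value only
    try:
        # Old style: local("~/scripts/{}.py {}".format('common_prepare_notebook', params))
        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params),
                       shell=True, check=True)  # check=True raises CalledProcessError on failure
    except subprocess.CalledProcessError as err:
        print("Script failed:", err)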


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org


[incubator-datalab] 02/02: [DATALAB-2091]: partially replaced local with subprocess.run(

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2091-fab2
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 5d8eb0129bee8a0ac9f9f9dc26edbd96bfe81787
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Feb 8 12:51:14 2021 +0200

    [DATALAB-2091]: partially replaced local with subprocess.run(
---
 .../src/general/scripts/azure/jupyter_configure.py | 23 +++++++++---------
 .../general/scripts/azure/zeppelin_configure.py    | 21 +++++++++--------
 .../src/tensor-rstudio/fabfile.py                  | 27 +++++++++++-----------
 integration-tests/examples/copy_files.py           |  7 +++---
 4 files changed, 41 insertions(+), 37 deletions(-)
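
The four files below follow the same substitution. Where check=True is not used (as in this patch), one hedged alternative for preserving failure detection is to inspect the returned CompletedProcess explicitly; this is a sketch, not code taken from the repository:

    import subprocess

    # Illustrative only: run one of the helper scripts and fail loudly on a non-zero exit.
    result = subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', '--help'),
                            shell=True)
    if result.returncode != 0:
        raise Exception("create_ssh_user exited with code {}".format(result.returncode))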

diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
index 5002078..0028e39 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
@@ -29,6 +29,7 @@ import logging
 import os
 import sys
 import traceback
+import subprocess
 from fabric import *
 
 if __name__ == "__main__":
@@ -139,7 +140,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            local("~/scripts/{}.py {}".format('create_ssh_user', params))
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -157,7 +158,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            local("~/scripts/{}.py {}".format('common_configure_proxy', params))
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -174,7 +175,7 @@ if __name__ == "__main__":
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
         try:
-            local("~/scripts/{}.py {}".format('install_prerequisites', params))
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -198,7 +199,7 @@ if __name__ == "__main__":
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
                    notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
         try:
-            local("~/scripts/{}.py {}".format('configure_jupyter_node', params))
+            subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                    os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem")
         except:
@@ -217,7 +218,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            local("~/scripts/{}.py {}".format('install_user_key', params))
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -232,8 +233,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            # local("~/scripts/{}.py {}".format('common_download_git_certfile', params))
-            local("~/scripts/{}.py {}".format('manage_git_creds', params))
+            # subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -250,7 +251,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['resource_group_name'], notebook_config['instance_name'])
             try:
-                local("~/scripts/{}.py {}".format('common_remove_remote_kernels', params))
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -274,7 +275,7 @@ if __name__ == "__main__":
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
                 print("Image was successfully created.")
-                local("~/scripts/{}.py".format('common_prepare_notebook'))
+                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True)
                 instance_running = False
                 while not instance_running:
                     if AzureMeta.get_instance_status(notebook_config['resource_group_name'],
@@ -290,7 +291,7 @@ if __name__ == "__main__":
                 params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
                     .format(instance_hostname, notebook_config['instance_name'], keyfile_name,
                             json.dumps(additional_config), notebook_config['datalab_ssh_user'])
-                local("~/scripts/{}.py {}".format('common_configure_proxy', params))
+                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
@@ -316,7 +317,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
index 58cb4c5..43ddfec 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
@@ -29,6 +29,7 @@ import logging
 import os
 import sys
 import traceback
+import subprocess
 from fabric import *
 
 if __name__ == "__main__":
@@ -140,7 +141,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            local("~/scripts/{}.py {}".format('create_ssh_user', params))
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -158,7 +159,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            local("~/scripts/{}.py {}".format('common_configure_proxy', params))
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -175,7 +176,7 @@ if __name__ == "__main__":
             .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                     edge_instance_private_hostname)
         try:
-            local("~/scripts/{}.py {}".format('install_prerequisites', params))
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -209,7 +210,7 @@ if __name__ == "__main__":
                     os.environ['notebook_multiple_clusters'], os.environ['notebook_r_mirror'], 'null',
                     notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
         try:
-            local("~/scripts/{}.py {}".format('configure_zeppelin_node', params))
+            subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                    os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem")
         except:
@@ -228,7 +229,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            local("~/scripts/{}.py {}".format('install_user_key', params))
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -243,7 +244,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            local("~/scripts/{}.py {}".format('manage_git_creds', params))
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -260,7 +261,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['resource_group_name'], notebook_config['instance_name'])
             try:
-                local("~/scripts/{}.py {}".format('common_remove_remote_kernels', params))
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -284,7 +285,7 @@ if __name__ == "__main__":
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
                 print("Image was successfully created.")
-                local("~/scripts/{}.py".format('common_prepare_notebook'))
+                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True)
                 instance_running = False
                 while not instance_running:
                     if AzureMeta.get_instance_status(notebook_config['resource_group_name'],
@@ -300,7 +301,7 @@ if __name__ == "__main__":
                 params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
                     .format(instance_hostname, notebook_config['instance_name'], keyfile_name,
                             json.dumps(additional_config), notebook_config['datalab_ssh_user'])
-                local("~/scripts/{}.py {}".format('common_configure_proxy', params))
+                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
@@ -326,7 +327,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/tensor-rstudio/fabfile.py b/infrastructure-provisioning/src/tensor-rstudio/fabfile.py
index a19f830..f490177 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/fabfile.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/fabfile.py
@@ -25,6 +25,7 @@ import logging
 import os
 import sys
 import uuid
+import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
@@ -44,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        local("~/scripts/{}.py {}".format('common_prepare_notebook', params))
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -52,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        local("~/scripts/{}.py {}".format('tensor-rstudio_configure', params))
+        subprocess.run("~/scripts/{}.py {}".format('tensor-rstudio_configure', params), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -67,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        local("~/scripts/{}.py".format('common_terminate_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -82,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        local("~/scripts/{}.py".format('common_stop_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -98,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_start_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -116,7 +117,7 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine':
-            local("~/scripts/{}.py".format('common_notebook_configure_dataengine'))
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring dataengine on Notebook node.", str(err))
@@ -133,7 +134,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_install_libs'))
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -150,7 +151,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_list_libs'))
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -167,7 +168,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_git_creds'))
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -184,7 +185,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_create_notebook_image'))
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -201,7 +202,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_terminate_notebook_image'))
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -218,7 +219,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_reconfigure_spark'))
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
@@ -234,7 +235,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_inactivity_check'))
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/integration-tests/examples/copy_files.py b/integration-tests/examples/copy_files.py
index 0295091..66f56ba 100644
--- a/integration-tests/examples/copy_files.py
+++ b/integration-tests/examples/copy_files.py
@@ -23,6 +23,7 @@
 
 import os, sys, json
 import argparse
+import subprocess
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -38,7 +39,7 @@ dataset_file = ['airports.csv', 'carriers.csv', '2008.csv.bz2']
 def download_dataset():
     try:
         for f in dataset_file:
-            local('wget http://stat-computing.org/dataexpo/2009/{0} -O /tmp/{0}'.format(f))
+            subprocess.run('wget http://stat-computing.org/dataexpo/2009/{0} -O /tmp/{0}'.format(f), shell=True)
     except Exception as err:
         print('Failed to download test dataset', str(err))
         sys.exit(1)
@@ -46,7 +47,7 @@ def download_dataset():
 def upload_aws():
     try:
         for f in dataset_file:
-            local('aws s3 cp /tmp/{0} s3://{1}/{2}_dataset/ --sse AES256'.format(f, args.storage, args.notebook))
+            subprocess.run('aws s3 cp /tmp/{0} s3://{1}/{2}_dataset/ --sse AES256'.format(f, args.storage, args.notebook), shell=True)
     except Exception as err:
         print('Failed to upload test dataset to bucket', str(err))
         sys.exit(1)
@@ -89,7 +90,7 @@ def upload_azure_blob():
 def upload_gcp():
     try:
         for f in dataset_file:
-            local('sudo gsutil -m cp /tmp/{0} gs://{1}/{2}_dataset/'.format(f, args.storage, args.notebook))
+            subprocess.run('sudo gsutil -m cp /tmp/{0} gs://{1}/{2}_dataset/'.format(f, args.storage, args.notebook), shell=True)
     except Exception as err:
         print('Failed to upload test dataset to bucket', str(err))
         sys.exit(1)
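
A side note on the copy_files.py hunks above: the patch keeps shell strings, but subprocess.run() also accepts an argument list, which avoids shell quoting of the interpolated names. A sketch under that assumption (not what the commit does):

    import subprocess

    f = 'airports.csv'  # one of the dataset files listed above
    # Argument-list form of the wget call; no shell is involved, so no quoting issues.
    subprocess.run(['wget', 'http://stat-computing.org/dataexpo/2009/{0}'.format(f),
                    '-O', '/tmp/{0}'.format(f)], check=True)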


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org


[incubator-datalab] 01/02: [DATALAB-2091]: partially replaced local with subprocess.run(

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2091-fab2
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit a92b4a1b9f7f9ab5a6516bbfba07c27e8d505611
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Feb 8 12:45:36 2021 +0200

    [DATALAB-2091]: partially replaced local with subprocess.run(
---
 .../src/general/scripts/aws/edge_configure.py      | 15 +++----
 .../src/general/scripts/aws/zeppelin_configure.py  | 17 ++++----
 .../scripts/azure/common_prepare_notebook.py       |  3 +-
 .../src/general/scripts/azure/tensor_configure.py  | 21 +++++-----
 infrastructure-provisioning/src/jupyter/fabfile.py | 29 ++++++-------
 .../src/jupyterlab/fabfile.py                      | 27 ++++++------
 .../src/project/scripts/reupload_ssh_key.py        |  3 +-
 infrastructure-provisioning/src/rstudio/fabfile.py | 29 ++++++-------
 .../src/ssn/scripts/configure_gitlab.py            | 49 +++++++++++-----------
 .../src/ssn/scripts/restore.py                     | 41 +++++++++---------
 infrastructure-provisioning/src/tensor/fabfile.py  | 27 ++++++------
 .../examples/scenario_jupyter/jupyter_tests.py     |  6 +--
 12 files changed, 139 insertions(+), 128 deletions(-)
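
The hunks below repeat the same try/except-wrapped call at every call site. Purely as an illustration of that recurring pattern (no such helper exists in this patch), it could be factored into a small wrapper:

    import subprocess
    import traceback

    def run_script(name, params=''):
        # Hypothetical helper mirroring the old local() behaviour:
        # run a provisioning script and re-raise if it exits non-zero.
        cmd = "~/scripts/{}.py {}".format(name, params).strip()
        try:
            subprocess.run(cmd, shell=True, check=True)
        except subprocess.CalledProcessError:
            traceback.print_exc()
            raise

    # e.g. run_script('create_ssh_user', params) at the call sites changed below.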

diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py b/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
index 8928814..d600ba9 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
@@ -30,6 +30,7 @@ import os
 import sys
 import traceback
 import uuid
+import subprocess
 from fabric import *
 
 if __name__ == "__main__":
@@ -141,7 +142,7 @@ if __name__ == "__main__":
             edge_conf['initial_user'], edge_conf['datalab_ssh_user'], edge_conf['sudo_group'])
 
         try:
-            local("~/scripts/{}.py {}".format('create_ssh_user', params))
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -157,7 +158,7 @@ if __name__ == "__main__":
             format(edge_conf['instance_hostname'], edge_conf['keyfile_name'], edge_conf['datalab_ssh_user'],
                    os.environ['aws_region'])
         try:
-            local("~/scripts/{}.py {}".format('install_prerequisites', params))
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -197,7 +198,7 @@ if __name__ == "__main__":
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
             edge_conf['datalab_ssh_user'])
         try:
-            local("~/scripts/{}.py {}".format('configure_http_proxy', params))
+            subprocess.run("~/scripts/{}.py {}".format('configure_http_proxy', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -217,7 +218,7 @@ if __name__ == "__main__":
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
             edge_conf['datalab_ssh_user'])
         try:
-            local("~/scripts/{}.py {}".format('install_user_key', params))
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -237,7 +238,7 @@ if __name__ == "__main__":
                               edge_conf['endpoint_name']),
             edge_conf['keycloak_client_secret'], edge_conf['step_cert_sans'])
         try:
-            local("~/scripts/{}.py {}".format('configure_nginx_reverse_proxy', params))
+            subprocess.run("~/scripts/{}.py {}".format('configure_nginx_reverse_proxy', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -254,7 +255,7 @@ if __name__ == "__main__":
                            edge_conf['instance_hostname'], edge_conf['instance_hostname'], edge_conf['project_name'],
                            edge_conf['endpoint_name'], edge_conf['edge_hostname'])
         try:
-            local("~/scripts/{}.py {}".format('configure_keycloak', keycloak_params))
+            subprocess.run("~/scripts/{}.py {}".format('configure_keycloak', keycloak_params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -273,7 +274,7 @@ if __name__ == "__main__":
                 edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
                 edge_conf['datalab_ssh_user'])
             try:
-                local("~/scripts/{}.py {}".format('configure_nftables', params))
+                subprocess.run("~/scripts/{}.py {}".format('configure_nftables', params), shell=True)
             except:
                 traceback.print_exc()
                 raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
index cec16c1..794e53e 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
@@ -30,6 +30,7 @@ import logging
 import os
 import sys
 import traceback
+import subprocess
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -124,7 +125,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            local("~/scripts/{}.py {}".format('create_ssh_user', params))
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -142,7 +143,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            local("~/scripts/{}.py {}".format('common_configure_proxy', params))
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -159,7 +160,7 @@ if __name__ == "__main__":
             .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
                     edge_instance_private_ip)
         try:
-            local("~/scripts/{}.py {}".format('install_prerequisites', params))
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -197,7 +198,7 @@ if __name__ == "__main__":
                     os.environ['notebook_r_mirror'], notebook_config['endpoint_url'], notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_ip)
         try:
-            local("~/scripts/{}.py {}".format('configure_zeppelin_node', params))
+            subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -214,7 +215,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            local("~/scripts/{}.py {}".format('install_user_key', params))
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -229,7 +230,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            local("~/scripts/{}.py {}".format('manage_git_creds', params))
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -246,7 +247,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
-                local("~/scripts/{}.py {}".format('common_remove_remote_kernels', params))
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -266,7 +267,7 @@ if __name__ == "__main__":
             .format(edge_instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], 'zeppelin',
                     notebook_config['exploratory_name'], json.dumps(additional_info))
         try:
-            local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
index 25c7f5a..0a174c1 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
@@ -29,6 +29,7 @@ import logging
 import os
 import sys
 import traceback
+import subprocess
 from Crypto.PublicKey import RSA
 from fabric import *
 
@@ -168,7 +169,7 @@ if __name__ == "__main__":
                    notebook_config['instance_storage_account_type'], notebook_config['image_name'],
                    notebook_config['image_type'], json.dumps(notebook_config['tags']))
         try:
-            local("~/scripts/{}.py {}".format('common_create_instance', params))
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
index f649c50..fc9d12d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
@@ -29,6 +29,7 @@ import logging
 import os
 import sys
 import traceback
+import subprocess
 from fabric import *
 
 if __name__ == "__main__":
@@ -141,7 +142,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            local("~/scripts/{}.py {}".format('create_ssh_user', params))
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -159,7 +160,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            local("~/scripts/{}.py {}".format('common_configure_proxy', params))
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -176,7 +177,7 @@ if __name__ == "__main__":
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
         try:
-            local("~/scripts/{}.py {}".format('install_prerequisites', params))
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -196,7 +197,7 @@ if __name__ == "__main__":
                          os.environ['azure_region'], notebook_config['datalab_ssh_user'],
                          notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
         try:
-            local("~/scripts/{}.py {}".format('configure_tensor_node', params))
+            subprocess.run("~/scripts/{}.py {}".format('configure_tensor_node', params), shell=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                    os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem")
         except:
@@ -215,7 +216,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            local("~/scripts/{}.py {}".format('install_user_key', params))
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -230,7 +231,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            local("~/scripts/{}.py {}".format('manage_git_creds', params))
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -247,7 +248,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['resource_group_name'], notebook_config['instance_name'])
             try:
-                local("~/scripts/{}.py {}".format('common_remove_remote_kernels', params))
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -271,7 +272,7 @@ if __name__ == "__main__":
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
                 print("Image was successfully created.")
-                local("~/scripts/{}.py".format('common_prepare_notebook'))
+                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True)
                 instance_running = False
                 while not instance_running:
                     if AzureMeta.get_instance_status(notebook_config['resource_group_name'],
@@ -287,7 +288,7 @@ if __name__ == "__main__":
                 params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
                     .format(instance_hostname, notebook_config['instance_name'], keyfile_name,
                             json.dumps(additional_config), notebook_config['datalab_ssh_user'])
-                local("~/scripts/{}.py {}".format('common_configure_proxy', params))
+                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
@@ -313,7 +314,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            local("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params))
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/jupyter/fabfile.py b/infrastructure-provisioning/src/jupyter/fabfile.py
index 770ddc7..a9467a0 100644
--- a/infrastructure-provisioning/src/jupyter/fabfile.py
+++ b/infrastructure-provisioning/src/jupyter/fabfile.py
@@ -25,6 +25,7 @@ import logging
 import os
 import sys
 import uuid
+import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
@@ -44,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        local("~/scripts/{}.py {}".format('common_prepare_notebook', params))
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -52,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        local("~/scripts/{}.py {}".format('jupyter_configure', params))
+        subprocess.run("~/scripts/{}.py {}".format('jupyter_configure', params), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -67,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        local("~/scripts/{}.py".format('common_terminate_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -82,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        local("~/scripts/{}.py".format('common_stop_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -98,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_start_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -115,9 +116,9 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine-service':
-            local("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'))
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True)
         elif os.environ['conf_resource'] == 'dataengine':
-            local("~/scripts/{}.py".format('common_notebook_configure_dataengine'))
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring analytical tool on Notebook node.", str(err))
@@ -134,7 +135,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_install_libs'))
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -151,7 +152,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_list_libs'))
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -168,7 +169,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_git_creds'))
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -185,7 +186,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_create_notebook_image'))
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -202,7 +203,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_terminate_notebook_image'))
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -219,7 +220,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_reconfigure_spark'))
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
@@ -235,7 +236,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_inactivity_check'))
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/infrastructure-provisioning/src/jupyterlab/fabfile.py b/infrastructure-provisioning/src/jupyterlab/fabfile.py
index 4ce375f..99ecb14 100644
--- a/infrastructure-provisioning/src/jupyterlab/fabfile.py
+++ b/infrastructure-provisioning/src/jupyterlab/fabfile.py
@@ -25,6 +25,7 @@ import logging
 import os
 import sys
 import uuid
+import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
@@ -44,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        local("~/scripts/{}.py {}".format('common_prepare_notebook', params))
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -52,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        local("~/scripts/{}.py {}".format('jupyterlab_configure', params))
+        subprocess.run("~/scripts/{}.py {}".format('jupyterlab_configure', params), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -67,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        local("~/scripts/{}.py".format('common_terminate_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -82,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        local("~/scripts/{}.py".format('common_stop_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -98,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_start_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -115,9 +116,9 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine-service':
-            local("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'))
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True)
         elif os.environ['conf_resource'] == 'dataengine':
-            local("~/scripts/{}.py".format('common_notebook_configure_dataengine'))
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring analytical tool on Notebook node.", str(err))
@@ -134,7 +135,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_install_libs'))
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -151,7 +152,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_list_libs'))
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -168,7 +169,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_git_creds'))
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -185,7 +186,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_create_notebook_image'))
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -202,7 +203,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_terminate_notebook_image'))
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -219,7 +220,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_reconfigure_spark'))
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
diff --git a/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py b/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
index a4deccd..e1d5184 100644
--- a/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
+++ b/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
@@ -25,6 +25,7 @@ import logging
 import os
 import sys
 import traceback
+import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
@@ -57,7 +58,7 @@ if __name__ == "__main__":
                 os.environ['conf_resource'], reupload_config['resource_id'],
                 reupload_config['os_user'],  reupload_config['keyfile'],
                 json.dumps(reupload_config['additional_config']))
-            local("~/scripts/{}.py {}".format('common_reupload_key', params))
+            subprocess.run("~/scripts/{}.py {}".format('common_reupload_key', params), shell=True)
         except Exception as err:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/rstudio/fabfile.py b/infrastructure-provisioning/src/rstudio/fabfile.py
index fa7149e..205e1d2 100644
--- a/infrastructure-provisioning/src/rstudio/fabfile.py
+++ b/infrastructure-provisioning/src/rstudio/fabfile.py
@@ -25,6 +25,7 @@ import logging
 import os
 import sys
 import uuid
+import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
@@ -44,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        local("~/scripts/{}.py {}".format('common_prepare_notebook', params))
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -52,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        local("~/scripts/{}.py {}".format('rstudio_configure', params))
+        subprocess.run("~/scripts/{}.py {}".format('rstudio_configure', params), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -67,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        local("~/scripts/{}.py".format('common_terminate_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -82,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        local("~/scripts/{}.py".format('common_stop_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -98,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_start_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -115,9 +116,9 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine-service':
-            local("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'))
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True)
         elif os.environ['conf_resource'] == 'dataengine':
-            local("~/scripts/{}.py".format('common_notebook_configure_dataengine'))
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
     except Exception as err:
         append_result("Failed configuring analytical tool on Notebook node.", str(err))
         sys.exit(1)
@@ -133,7 +134,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_install_libs'))
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -150,7 +151,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_list_libs'))
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -167,7 +168,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_git_creds'))
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -184,7 +185,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_create_notebook_image'))
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -201,7 +202,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_terminate_notebook_image'))
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -218,7 +219,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_reconfigure_spark'))
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
@@ -234,7 +235,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_inactivity_check'))
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
index 5393344..ccfb98a 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
@@ -25,6 +25,7 @@ import argparse
 import json
 import os
 import sys
+import subprocess
 from fabric import *
 from datalab.fab import *
 
@@ -66,35 +67,35 @@ def prepare_config():
     try:
         with lcd('{}tmp/gitlab'.format(os.environ['conf_datalab_path'])):
             if os.path.exists('{}tmp/gitlab/gitlab.rb.bak'.format(os.environ['conf_datalab_path'])):
-                local('cp gitlab.rb.bak gitlab.rb')
+                subprocess.run('cp gitlab.rb.bak gitlab.rb', shell=True)
             else:
-                local('cp gitlab.rb gitlab.rb.bak')
+                subprocess.run('cp gitlab.rb gitlab.rb.bak', shell=True)
             if json.loads(os.environ['gitlab_ssl_enabled']):
-                local('sed -i "s,EXTERNAL_URL,https://{}:443,g" gitlab.rb'.format(os.environ['instance_hostname']))
-                local('sed -i "s/.*NGINX_ENABLED/nginx[\'enable\'] = true/g" gitlab.rb')
-                local(
+                subprocess.run('sed -i "s,EXTERNAL_URL,https://{}:443,g" gitlab.rb'.format(os.environ['instance_hostname']), shell=True)
+                subprocess.run('sed -i "s/.*NGINX_ENABLED/nginx[\'enable\'] = true/g" gitlab.rb', shell=True)
+                subprocess.run(
                     'sed -i "s,.*NGINX_SSL_CERTIFICATE_KEY,nginx[\'ssl_certificate_key\'] = \'{}\',g" gitlab.rb'.format(
-                        os.environ['gitlab_ssl_certificate_key']))
-                local('sed -i "s,.*NGINX_SSL_CERTIFICATE,nginx[\'ssl_certificate\'] = \'{}\',g" gitlab.rb'.format(
-                    os.environ['gitlab_ssl_certificate']))
-                local('sed -i "s,.*NGINX_SSL_DHPARAMS.*,nginx[\'ssl_dhparam\'] = \'{}\',g" gitlab.rb'.format(
+                        os.environ['gitlab_ssl_certificate_key']), shell=True)
+                subprocess.run('sed -i "s,.*NGINX_SSL_CERTIFICATE,nginx[\'ssl_certificate\'] = \'{}\',g" gitlab.rb'.format(
+                    os.environ['gitlab_ssl_certificate']), shell=True)
+                subprocess.run('sed -i "s,.*NGINX_SSL_DHPARAMS.*,nginx[\'ssl_dhparam\'] = \'{}\',g" gitlab.rb'.format(
+                    os.environ['gitlab_ssl_dhparams']), shell=True)
                 if json.loads(os.environ['gitlab_https_redirect_enabled']):
-                    local('sed -i "s,.*NGINX_REDIRECT_TO_HTTPS,nginx[\'redirect_http_to_https\'] = true,g" gitlab.rb')
-                    local('sed -i "s,.*NGINX_REDIRECT_PORT,nginx[\'redirect_http_to_https_port\'] = 80,g" gitlab.rb')
+                    subprocess.run('sed -i "s,.*NGINX_REDIRECT_TO_HTTPS,nginx[\'redirect_http_to_https\'] = true,g" gitlab.rb', shell=True)
+                    subprocess.run('sed -i "s,.*NGINX_REDIRECT_PORT,nginx[\'redirect_http_to_https_port\'] = 80,g" gitlab.rb', shell=True)
             else:
-                local('sed -i "s,EXTERNAL_URL,http://{},g" gitlab.rb'.format(os.environ['instance_hostname']))
-
-            local('sed -i "s/LDAP_HOST/{}/g" gitlab.rb'.format(os.environ['ldap_hostname']))
-            local('sed -i "s/LDAP_PORT/{}/g" gitlab.rb'.format(os.environ['ldap_port']))
-            local('sed -i "s/LDAP_UID/{}/g" gitlab.rb'.format(os.environ['ldap_uid']))
-            local('sed -i "s/LDAP_BIND_DN/{}/g" gitlab.rb'.format(os.environ['ldap_bind_dn']))
-            local("sed -i 's/LDAP_PASSWORD/{}/g' gitlab.rb".format(os.environ['ldap_password']))
-            local('sed -i "s/LDAP_BASE/{}/g" gitlab.rb'.format(os.environ['ldap_base']))
-            local("sed -i 's/LDAP_ATTR_USERNAME/{}/g' gitlab.rb".format(os.environ['ldap_attr_username']))
-            local("sed -i 's/LDAP_ATTR_EMAIL/{}/g' gitlab.rb".format(os.environ['ldap_attr_email']))
-
-            local("sed -i 's/GITLAB_ROOT_PASSWORD/{}/g' gitlab.rb".format(os.environ['gitlab_root_password']))
+                subprocess.run('sed -i "s,EXTERNAL_URL,http://{},g" gitlab.rb'.format(os.environ['instance_hostname']), shell=True)
+
+            subprocess.run('sed -i "s/LDAP_HOST/{}/g" gitlab.rb'.format(os.environ['ldap_hostname']), shell=True)
+            subprocess.run('sed -i "s/LDAP_PORT/{}/g" gitlab.rb'.format(os.environ['ldap_port']), shell=True)
+            subprocess.run('sed -i "s/LDAP_UID/{}/g" gitlab.rb'.format(os.environ['ldap_uid']), shell=True)
+            subprocess.run('sed -i "s/LDAP_BIND_DN/{}/g" gitlab.rb'.format(os.environ['ldap_bind_dn']), shell=True)
+            subprocess.run("sed -i 's/LDAP_PASSWORD/{}/g' gitlab.rb".format(os.environ['ldap_password']), shell=True)
+            subprocess.run('sed -i "s/LDAP_BASE/{}/g" gitlab.rb'.format(os.environ['ldap_base']), shell=True)
+            subprocess.run("sed -i 's/LDAP_ATTR_USERNAME/{}/g' gitlab.rb".format(os.environ['ldap_attr_username']), shell=True)
+            subprocess.run("sed -i 's/LDAP_ATTR_EMAIL/{}/g' gitlab.rb".format(os.environ['ldap_attr_email']), shell=True)
+
+            subprocess.run("sed -i 's/GITLAB_ROOT_PASSWORD/{}/g' gitlab.rb".format(os.environ['gitlab_root_password']), shell=True)
         print('Initial config is ready.')
     except Exception as err:
         print('Failed to install gitlab.{}'.format(str(err)))
@@ -116,7 +117,7 @@ def install_gitlab():
 
         with lcd('{}tmp/gitlab'.format(os.environ['conf_datalab_path'])):
             conn.put('gitlab.rb', '/tmp/gitlab.rb')
-            local('rm gitlab.rb')
+            subprocess.run('rm gitlab.rb', shell=True)
         conn.sudo('rm /etc/gitlab/gitlab.rb')
         conn.sudo('mv /tmp/gitlab.rb /etc/gitlab/gitlab.rb')
 
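The sed one-liners above interpolate values from os.environ (hostnames, LDAP
settings, the GitLab root password) into a string that /bin/sh parses once
shell=True is used, so shell metacharacters in those values reach the shell
unquoted. A minimal sketch of how shlex.quote could be applied, assuming the
values contain no sed delimiter characters; the helper name sed_replace is
illustrative and not part of this patch:

    import shlex
    import subprocess

    def sed_replace(placeholder, value, path):
        # shlex.quote() keeps quotes, spaces and other shell metacharacters in
        # the substituted value from being interpreted by /bin/sh.
        expr = 's/{}/{}/g'.format(placeholder, value)
        subprocess.run('sed -i {} {}'.format(shlex.quote(expr), shlex.quote(path)),
                       shell=True)

    # e.g. sed_replace('LDAP_PASSWORD', os.environ['ldap_password'], 'gitlab.rb')
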
diff --git a/infrastructure-provisioning/src/ssn/scripts/restore.py b/infrastructure-provisioning/src/ssn/scripts/restore.py
index 5395e72..21fd024 100644
--- a/infrastructure-provisioning/src/ssn/scripts/restore.py
+++ b/infrastructure-provisioning/src/ssn/scripts/restore.py
@@ -26,6 +26,7 @@ import filecmp
 import os
 import sys
 import yaml
+import subprocess
 from fabric import *
 
 parser = argparse.ArgumentParser(description="Restore script for DataLab configs, keys, certs, jars & database")
@@ -73,15 +74,15 @@ def restore_prepare():
                 print("Use folder path '{}' in --file key".format(temp_folder))
                 raise Exception
             print("Backup acrhive will be unpacked to: {}".format(temp_folder))
-            local("mkdir {}".format(temp_folder))
-            local("tar -xf {0} -C {1}".format(backup_file, temp_folder))
+            subprocess.run("mkdir {}".format(temp_folder), shell=True)
+            subprocess.run("tar -xf {0} -C {1}".format(backup_file, temp_folder), shell=True)
         elif os.path.isdir(backup_file):
             temp_folder = backup_file
         else:
             print("Please, specify file or folder. Try --help for more details.")
             raise Exception
         print("Backup acrhive: {} contains following files (exclude logs):".format(backup_file))
-        local("find {} -not -name '*log'".format(temp_folder))
+        subprocess.run("find {} -not -name '*log'".format(temp_folder), shell=True)
     except Exception as err:
         print("Failed to open backup.{}".format(str(err)))
         sys.exit(1)
@@ -90,14 +91,14 @@ def restore_prepare():
         if ask("Maybe you want to create backup of existing configuration before restoring?"):
             with settings(hide('everything')):
                 print("Creating new backup...")
-                local("python3 backup.py --configs all --keys all --certs all --jar all --db")
+                subprocess.run("python3 backup.py --configs all --keys all --certs all --jar all --db", shell=True)
     except:
         print("Failed to create new backup.")
         sys.exit(1)
 
     try:
         if ask("Stop all services before restoring?"):
-            local("sudo supervisorctl stop all")
+            subprocess.run("sudo supervisorctl stop all", shell=True)
         else:
             raise Exception
     except:
@@ -130,14 +131,14 @@ def restore_configs():
                         destfile = "{0}{1}{2}".format(args.datalab_path, conf_folder, filename)
                         if not filecmp.cmp(backupfile, destfile):
                             if ask("Config {} was changed, rewrite it?".format(filename)):
-                                local("cp -f {0} {1}".format(backupfile, destfile))
+                                subprocess.run("cp -f {0} {1}".format(backupfile, destfile), shell=True)
                             else:
                                 print("Config {} was skipped.".format(destfile))
                         else:
                             print("Config {} was not changed. Skipped.".format(filename))
                     else:
                         print("Config {} does not exist. Creating.".format(filename))
-                        local("cp {0}{1}{2} {3}{1}{2}".format(temp_folder, conf_folder, filename, args.datalab_path))
+                        subprocess.run("cp {0}{1}{2} {3}{1}{2}".format(temp_folder, conf_folder, filename, args.datalab_path), shell=True)
     except:
         print("Restore configs failed.")
 
@@ -164,14 +165,14 @@ def restore_keys():
                         print("Key {} already exist.".format(filename))
                         if not filecmp.cmp("{0}keys/{1}".format(temp_folder, filename), "{0}{1}".format(keys_folder, filename)):
                             if ask("Key {} was changed, rewrite it?".format(filename)):
-                                local("cp -f {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename))
+                                subprocess.run("cp -f {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename), shell=True)
                             else:
                                 print("Key {} was skipped.".format(filename))
                         else:
                             print("Key {} was not changed. Skipped.".format(filename))
                     else:
                         print("Key {} does not exist. Creating.".format(filename))
-                        local("cp {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename))
+                        subprocess.run("cp {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename), shell=True)
     except:
         print("Restore keys failed.")
 
@@ -198,16 +199,16 @@ def restore_certs():
                         print("Cert {} already exist.".format(filename))
                         if not filecmp.cmp("{0}certs/{1}".format(temp_folder, filename), "{0}{1}".format(certs_folder, filename)):
                             if ask("Cert {} was changed, rewrite it?".format(filename)):
-                                local("sudo cp -f {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename))
-                                local("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename))
+                                subprocess.run("sudo cp -f {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename), shell=True)
+                                subprocess.run("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename), shell=True)
                             else:
                                 print("Cert {} was skipped.".format(filename))
                         else:
                             print("Cert {} was not changed. Skipped.".format(filename))
                     else:
                         print("Cert {} does not exist. Creating.".format(filename))
-                        local("sudo cp {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename))
-                        local("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename))
+                        subprocess.run("sudo cp {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename), shell=True)
+                        subprocess.run("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename), shell=True)
     except:
         print("Restore certs failed.")
 
@@ -238,15 +239,15 @@ def restore_jars():
                                 destfile = "{0}{1}{2}/{3}".format(args.datalab_path, jars_folder, service, filename)
                                 if not filecmp.cmp(backupfile, destfile):
                                     if ask("Jar {} was changed, rewrite it?".format(filename)):
-                                        local("cp -fP {0} {1}".format(backupfile, destfile))
+                                        subprocess.run("cp -fP {0} {1}".format(backupfile, destfile), shell=True)
                                     else:
                                         print("Jar {} was skipped.".format(destfile))
                                 else:
                                     print("Jar {} was not changed. Skipped.".format(filename))
                             else:
                                 print("Jar {} does not exist. Creating.".format(filename))
-                                local("cp -P {0}jars/{1}/{2} {3}{4}{1}".format(temp_folder, service, filename,
-                                                                               args.datalab_path, jars_folder))
+                                subprocess.run("cp -P {0}jars/{1}/{2} {3}{4}{1}".format(temp_folder, service, filename,
+                                                                               args.datalab_path, jars_folder), shell=True)
     except:
         print("Restore jars failed.")
 
@@ -263,9 +264,9 @@ def restore_database():
                     ssn_conf = open(args.datalab_path + conf_folder + 'ssn.yml').read()
                     data = yaml.load("mongo" + ssn_conf.split("mongo")[-1])
                     print("Restoring database from backup")
-                    local("mongorestore --drop --host {0} --port {1} --archive={2}/mongo.db --username {3} --password '{4}' --authenticationDatabase={5}" \
+                    subprocess.run("mongorestore --drop --host {0} --port {1} --archive={2}/mongo.db --username {3} --password '{4}' --authenticationDatabase={5}" \
                             .format(data['mongo']['host'], data['mongo']['port'], temp_folder,
-                                    data['mongo']['username'], data['mongo']['password'], data['mongo']['database']))
+                                    data['mongo']['username'], data['mongo']['password'], data['mongo']['database']), shell=True)
         else:
             print("Restore database was skipped.")
     except:
@@ -275,13 +276,13 @@ def restore_database():
 def restore_finalize():
     try:
         if ask("Start all services after restoring?"):
-            local("sudo supervisorctl start all")
+            subprocess.run("sudo supervisorctl start all", shell=True)
     except:
         print("Failed to start all services.")
 
     try:
         if ask("Clean temporary folder {}?".format(temp_folder)) and temp_folder != "/":
-            local("rm -rf {}".format(temp_folder))
+            subprocess.run("rm -rf {}".format(temp_folder), shell=True)
     except Exception as err:
         print("Clear temp folder failed. {}".format(str(err)))
 
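The bare except blocks in restore.py report a failed step, but
subprocess.run() without check=True returns a CompletedProcess even when tar,
cp or mongorestore exits non-zero, so those handlers are never reached for a
failing command. A minimal sketch of an explicit status check for the unpack
step; the function name is illustrative and not part of this patch:

    import subprocess

    def extract_backup(backup_file, temp_folder):
        # The return code has to be inspected (or check=True passed) for a
        # failed tar to surface as an exception in the caller.
        result = subprocess.run("tar -xf {0} -C {1}".format(backup_file, temp_folder),
                                shell=True)
        if result.returncode != 0:
            raise Exception("tar exited with status {}".format(result.returncode))
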
diff --git a/infrastructure-provisioning/src/tensor/fabfile.py b/infrastructure-provisioning/src/tensor/fabfile.py
index 923f3c2..4d47869 100644
--- a/infrastructure-provisioning/src/tensor/fabfile.py
+++ b/infrastructure-provisioning/src/tensor/fabfile.py
@@ -25,6 +25,7 @@ import logging
 import os
 import sys
 import uuid
+import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
@@ -44,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        local("~/scripts/{}.py {}".format('common_prepare_notebook', params))
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -52,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        local("~/scripts/{}.py {}".format('tensor_configure', params))
+        subprocess.run("~/scripts/{}.py {}".format('tensor_configure', params), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -67,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        local("~/scripts/{}.py".format('common_terminate_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -82,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        local("~/scripts/{}.py".format('common_stop_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -98,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_start_notebook'))
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -116,7 +117,7 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine':
-            local("~/scripts/{}.py".format('common_notebook_configure_dataengine'))
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring dataengine on Notebook node.", str(err))
@@ -133,7 +134,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_install_libs'))
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -150,7 +151,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_list_libs'))
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -167,7 +168,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_git_creds'))
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -184,7 +185,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_create_notebook_image'))
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -201,7 +202,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('common_terminate_notebook_image'))
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -218,7 +219,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_reconfigure_spark'))
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
@@ -234,7 +235,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        local("~/scripts/{}.py".format('notebook_inactivity_check'))
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/integration-tests/examples/scenario_jupyter/jupyter_tests.py b/integration-tests/examples/scenario_jupyter/jupyter_tests.py
index e293d23..ce30ebd 100644
--- a/integration-tests/examples/scenario_jupyter/jupyter_tests.py
+++ b/integration-tests/examples/scenario_jupyter/jupyter_tests.py
@@ -24,7 +24,7 @@
 import os, sys, json
 from fabric import *
 import argparse
-
+import subprocess
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--storage', type=str, default='')
@@ -37,7 +37,7 @@ args = parser.parse_args()
 
 
 def prepare_templates():
-    local('mv /tmp/jupyter /home/{0}/test_templates'.format(args.os_user))
+    subprocess.run('mv /tmp/jupyter /home/{0}/test_templates'.format(args.os_user), shell=True)
 
 def get_storage():
     storages = {"aws": args.storage,
@@ -59,7 +59,7 @@ def prepare_ipynb(kernel_name, template_path, ipynb_name):
         f.write(text)
 
 def run_ipynb(ipynb_name):
-    local('jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name))
+    subprocess.run('jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name), shell=True)
 
 def run_pyspark():
     interpreters = ['pyspark_local', 'pyspark_' + args.cluster_name]
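
Since run_ipynb() only shells out to nbconvert, its output goes straight to
the console. A minimal sketch, assuming Python 3.7+ for capture_output/text,
of how the execution log could be captured for the test report; the helper
name is illustrative and not part of this patch:

    import subprocess

    def run_ipynb_captured(os_user, ipynb_name):
        # capture_output=True collects stdout/stderr and text=True decodes
        # them, so nbconvert's traceback can be attached to a failed test.
        cmd = ('jupyter nbconvert --ExecutePreprocessor.timeout=-1 '
               '--ExecutePreprocessor.startup_timeout=300 '
               '--execute /home/{}/{}.ipynb'.format(os_user, ipynb_name))
        result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
        if result.returncode != 0:
            print(result.stderr)
        return result.returncode == 0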

