Posted to commits@datalab.apache.org by lf...@apache.org on 2021/02/17 08:57:35 UTC

[incubator-datalab] branch DATALAB-2091 updated (dfaa48a -> 3dcfc4e)

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a change to branch DATALAB-2091
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git.


    from dfaa48a  [DATALAB-2091]: changed how sudoers file is changed
     new bb64257  [DATALAB-2091]: added check of subprocess.run output
     new 3dcfc4e  [DATALAB-2091]: removed unnecessary check

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
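
For context: both commits center on passing check=True to subprocess.run, so
that a command exiting with a non-zero status raises subprocess.CalledProcessError
instead of failing silently. A minimal illustrative sketch (not taken from the
patch itself):

    import subprocess

    # Without check=True a failure is silent: run() returns a
    # CompletedProcess with returncode=1 and execution continues.
    result = subprocess.run('false', shell=True)
    print(result.returncode)  # 1, but no exception is raised

    # With check=True the same failure raises CalledProcessError,
    # so callers can catch it or let it abort the script.
    try:
        subprocess.run('false', shell=True, check=True)
    except subprocess.CalledProcessError as err:
        print('command failed with exit code', err.returncode)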


Summary of changes:
 .../scripts/deploy_datalab.py                      |   8 +-
 .../scripts/deploy_keycloak/deploy_keycloak.py     |   4 +-
 .../scripts/update_amazon_repositories.py          |  24 +--
 .../scripts/post-deployment_configuration.py       | 104 +++++------
 infrastructure-provisioning/src/base/entrypoint.py |  32 ++--
 .../src/base/scripts/install_user_key.py           |   2 +-
 .../src/dataengine-service/fabfile.py              |  10 +-
 .../src/dataengine/fabfile.py                      |  16 +-
 .../src/deeplearning/fabfile.py                    |  26 +--
 infrastructure-provisioning/src/edge/fabfile.py    |   6 +-
 .../src/edge/scripts/reupload_ssh_key.py           |   2 +-
 .../src/general/api/check_inactivity.py            |   4 +-
 .../src/general/api/configure.py                   |   4 +-
 .../src/general/api/create.py                      |   4 +-
 .../src/general/api/create_image.py                |   4 +-
 .../src/general/api/git_creds.py                   |   4 +-
 .../src/general/api/install_libs.py                |   4 +-
 .../src/general/api/list_libs.py                   |   4 +-
 .../src/general/api/reconfigure_spark.py           |   4 +-
 .../src/general/api/recreate.py                    |   4 +-
 .../src/general/api/reupload_key.py                |   4 +-
 .../src/general/api/start.py                       |   4 +-
 .../src/general/api/status.py                      |   4 +-
 .../src/general/api/stop.py                        |   4 +-
 .../src/general/api/terminate.py                   |   4 +-
 .../src/general/api/terminate_image.py             |   4 +-
 .../src/general/lib/aws/actions_lib.py             | 208 ++++++++++-----------
 .../src/general/lib/aws/meta_lib.py                |   2 +-
 .../src/general/lib/azure/actions_lib.py           |  58 +++---
 .../src/general/lib/gcp/actions_lib.py             | 158 ++++++++--------
 .../src/general/lib/gcp/meta_lib.py                |   2 +-
 .../src/general/lib/os/debian/common_lib.py        |   6 +-
 .../src/general/lib/os/fab.py                      |  30 +--
 .../src/general/lib/os/redhat/common_lib.py        |   6 +-
 .../src/general/lib/os/redhat/notebook_lib.py      |  12 +-
 ...common_notebook_configure_dataengine-service.py |   4 +-
 .../aws/common_notebook_configure_dataengine.py    |   4 +-
 .../general/scripts/aws/common_prepare_notebook.py |   2 +-
 .../src/general/scripts/aws/common_reupload_key.py |   2 +-
 .../general/scripts/aws/common_start_notebook.py   |   4 +-
 .../scripts/aws/dataengine-service_configure.py    |   8 +-
 .../scripts/aws/dataengine-service_install_libs.py |   2 +-
 .../scripts/aws/dataengine-service_list_libs.py    |   2 +-
 .../scripts/aws/dataengine-service_prepare.py      |  10 +-
 .../general/scripts/aws/dataengine_configure.py    |  26 +--
 .../src/general/scripts/aws/dataengine_prepare.py  |   4 +-
 .../src/general/scripts/aws/dataengine_start.py    |   2 +-
 .../general/scripts/aws/deeplearning_configure.py  |  16 +-
 .../src/general/scripts/aws/edge_configure.py      |  14 +-
 .../src/general/scripts/aws/edge_status.py         |   2 +-
 .../src/general/scripts/aws/jupyter_configure.py   |  18 +-
 .../jupyter_dataengine-service_create_configs.py   |  80 ++++----
 .../jupyter_install_dataengine-service_kernels.py  |   2 +-
 .../general/scripts/aws/jupyterlab_configure.py    |  22 +--
 .../src/general/scripts/aws/odahu_deploy.py        |  10 +-
 .../src/general/scripts/aws/odahu_prepare.py       |   2 +-
 .../src/general/scripts/aws/project_prepare.py     |  28 +--
 .../src/general/scripts/aws/rstudio_configure.py   |  16 +-
 .../rstudio_dataengine-service_create_configs.py   |  38 ++--
 .../src/general/scripts/aws/ssn_configure.py       |  14 +-
 .../src/general/scripts/aws/ssn_prepare.py         |  18 +-
 .../src/general/scripts/aws/ssn_terminate.py       |   2 +-
 .../scripts/aws/tensor-rstudio_configure.py        |  16 +-
 .../src/general/scripts/aws/tensor_configure.py    |  16 +-
 .../src/general/scripts/aws/zeppelin_configure.py  |  16 +-
 .../zeppelin_dataengine-service_create_configs.py  |  18 +-
 .../scripts/azure/common_create_notebook_image.py  |   4 +-
 .../azure/common_notebook_configure_dataengine.py  |   4 +-
 .../scripts/azure/common_prepare_notebook.py       |   2 +-
 .../general/scripts/azure/common_reupload_key.py   |   2 +-
 .../general/scripts/azure/common_start_notebook.py |   4 +-
 .../general/scripts/azure/dataengine_configure.py  |  26 +--
 .../general/scripts/azure/dataengine_prepare.py    |   4 +-
 .../src/general/scripts/azure/dataengine_start.py  |   2 +-
 .../scripts/azure/deeplearning_configure.py        |  20 +-
 .../src/general/scripts/azure/edge_configure.py    |  12 +-
 .../src/general/scripts/azure/edge_prepare.py      |  16 +-
 .../src/general/scripts/azure/edge_status.py       |   2 +-
 .../src/general/scripts/azure/jupyter_configure.py |  22 +--
 .../general/scripts/azure/jupyterlab_configure.py  |  24 +--
 .../src/general/scripts/azure/project_prepare.py   |  22 +--
 .../src/general/scripts/azure/rstudio_configure.py |  20 +-
 .../src/general/scripts/azure/ssn_configure.py     |  12 +-
 .../src/general/scripts/azure/ssn_prepare.py       |  16 +-
 .../src/general/scripts/azure/tensor_configure.py  |  20 +-
 .../general/scripts/azure/zeppelin_configure.py    |  20 +-
 ...common_notebook_configure_dataengine-service.py |   4 +-
 .../gcp/common_notebook_configure_dataengine.py    |   4 +-
 .../general/scripts/gcp/common_prepare_notebook.py |   2 +-
 .../src/general/scripts/gcp/common_reupload_key.py |   2 +-
 .../general/scripts/gcp/common_start_notebook.py   |   4 +-
 .../scripts/gcp/dataengine-service_configure.py    |   4 +-
 .../scripts/gcp/dataengine-service_install_libs.py |   2 +-
 .../scripts/gcp/dataengine-service_list_libs.py    |   2 +-
 .../scripts/gcp/dataengine-service_prepare.py      |  10 +-
 .../general/scripts/gcp/dataengine_configure.py    |  22 +--
 .../src/general/scripts/gcp/dataengine_prepare.py  |   4 +-
 .../src/general/scripts/gcp/dataengine_start.py    |   2 +-
 .../general/scripts/gcp/deeplearning_configure.py  |  16 +-
 .../src/general/scripts/gcp/edge_configure.py      |  14 +-
 .../src/general/scripts/gcp/edge_status.py         |   2 +-
 .../src/general/scripts/gcp/jupyter_configure.py   |  16 +-
 .../jupyter_dataengine-service_create_configs.py   |  38 ++--
 .../general/scripts/gcp/jupyterlab_configure.py    |  20 +-
 .../src/general/scripts/gcp/project_prepare.py     |  22 +--
 .../src/general/scripts/gcp/rstudio_configure.py   |  16 +-
 .../rstudio_dataengine-service_create_configs.py   |  34 ++--
 .../rstudio_install_dataengine-service_kernels.py  |   2 +-
 .../src/general/scripts/gcp/ssn_configure.py       |  14 +-
 .../src/general/scripts/gcp/ssn_prepare.py         |  12 +-
 .../src/general/scripts/gcp/ssn_terminate.py       |   2 +-
 .../src/general/scripts/gcp/superset_configure.py  |  18 +-
 .../scripts/gcp/tensor-rstudio_configure.py        |  16 +-
 .../src/general/scripts/gcp/tensor_configure.py    |  16 +-
 .../src/general/scripts/gcp/zeppelin_configure.py  |  16 +-
 .../zeppelin_dataengine-service_create_configs.py  |  18 +-
 .../general/scripts/os/dataengine_install_libs.py  |   4 +-
 .../src/general/scripts/os/dataengine_list_libs.py |   2 +-
 .../scripts/os/dataengine_reconfigure_spark.py     |   6 +-
 .../os/deeplearning_dataengine_create_configs.py   |  42 ++---
 .../os/jupyter_dataengine_create_configs.py        |  70 +++----
 .../src/general/scripts/os/notebook_git_creds.py   |   4 +-
 .../scripts/os/notebook_inactivity_check.py        |   2 +-
 .../general/scripts/os/notebook_install_libs.py    |   2 +-
 .../src/general/scripts/os/notebook_list_libs.py   |   2 +-
 .../scripts/os/notebook_reconfigure_spark.py       |   2 +-
 .../os/rstudio_dataengine_create_configs.py        |  34 ++--
 .../os/tensor-rstudio_dataengine_create_configs.py |  34 ++--
 .../scripts/os/tensor_dataengine_create_configs.py |  42 ++---
 .../os/zeppelin_dataengine_create_configs.py       |  74 ++++----
 infrastructure-provisioning/src/jupyter/fabfile.py |  28 +--
 .../src/jupyterlab/fabfile.py                      |  26 +--
 infrastructure-provisioning/src/project/fabfile.py |  10 +-
 .../src/project/scripts/reupload_ssh_key.py        |   2 +-
 infrastructure-provisioning/src/rstudio/fabfile.py |  28 +--
 infrastructure-provisioning/src/ssn/fabfile.py     |   6 +-
 .../src/ssn/scripts/backup.py                      |  46 ++---
 .../src/ssn/scripts/configure_gitlab.py            |  42 ++---
 .../src/ssn/scripts/configure_ui.py                |  20 +-
 .../src/ssn/scripts/docker_build.py                |  12 +-
 .../src/ssn/scripts/gitlab_deploy.py               |  10 +-
 .../src/ssn/scripts/restore.py                     |  36 ++--
 .../src/superset/fabfile.py                        |  18 +-
 .../src/tensor-rstudio/fabfile.py                  |  26 +--
 infrastructure-provisioning/src/tensor/fabfile.py  |  26 +--
 .../src/zeppelin/fabfile.py                        |  28 +--
 integration-tests/examples/copy_files.py           |   6 +-
 .../scenario_deeplearning/deeplearning_tests.py    |  18 +-
 .../examples/scenario_jupyter/jupyter_tests.py     |   4 +-
 .../examples/scenario_rstudio/rstudio_tests.py     |  12 +-
 .../examples/scenario_tensor/tensor_tests.py       |  18 +-
 .../examples/scenario_zeppelin/zeppelin_tests.py   |  10 +-
 152 files changed, 1276 insertions(+), 1278 deletions(-)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org


[incubator-datalab] 01/02: [DATALAB-2091]: added check of subprocess.run output

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2091
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit bb64257ceb0af5b27882474dfd364a707c325ca6
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Wed Feb 17 10:56:50 2021 +0200

    [DATALAB-2091]: added check of subprocess.run output
---
 .../scripts/deploy_datalab.py                      |   8 +-
 .../scripts/deploy_keycloak/deploy_keycloak.py     |   4 +-
 .../scripts/update_amazon_repositories.py          |  24 +--
 .../scripts/post-deployment_configuration.py       | 104 +++++------
 infrastructure-provisioning/src/base/entrypoint.py |  32 ++--
 .../src/base/scripts/install_user_key.py           |   2 +-
 .../src/dataengine-service/fabfile.py              |  10 +-
 .../src/dataengine/fabfile.py                      |  16 +-
 .../src/deeplearning/fabfile.py                    |  26 +--
 infrastructure-provisioning/src/edge/fabfile.py    |   6 +-
 .../src/edge/scripts/reupload_ssh_key.py           |   2 +-
 .../src/general/api/check_inactivity.py            |   4 +-
 .../src/general/api/configure.py                   |   4 +-
 .../src/general/api/create.py                      |   4 +-
 .../src/general/api/create_image.py                |   4 +-
 .../src/general/api/git_creds.py                   |   4 +-
 .../src/general/api/install_libs.py                |   4 +-
 .../src/general/api/list_libs.py                   |   4 +-
 .../src/general/api/reconfigure_spark.py           |   4 +-
 .../src/general/api/recreate.py                    |   4 +-
 .../src/general/api/reupload_key.py                |   4 +-
 .../src/general/api/start.py                       |   4 +-
 .../src/general/api/status.py                      |   4 +-
 .../src/general/api/stop.py                        |   4 +-
 .../src/general/api/terminate.py                   |   4 +-
 .../src/general/api/terminate_image.py             |   4 +-
 .../src/general/lib/aws/actions_lib.py             | 208 ++++++++++-----------
 .../src/general/lib/aws/meta_lib.py                |   2 +-
 .../src/general/lib/azure/actions_lib.py           |  58 +++---
 .../src/general/lib/gcp/actions_lib.py             | 158 ++++++++--------
 .../src/general/lib/gcp/meta_lib.py                |   2 +-
 .../src/general/lib/os/debian/common_lib.py        |   6 +-
 .../src/general/lib/os/fab.py                      |  30 +--
 .../src/general/lib/os/redhat/common_lib.py        |   6 +-
 .../src/general/lib/os/redhat/notebook_lib.py      |  12 +-
 ...common_notebook_configure_dataengine-service.py |   4 +-
 .../aws/common_notebook_configure_dataengine.py    |   4 +-
 .../general/scripts/aws/common_prepare_notebook.py |   2 +-
 .../src/general/scripts/aws/common_reupload_key.py |   2 +-
 .../general/scripts/aws/common_start_notebook.py   |   4 +-
 .../scripts/aws/dataengine-service_configure.py    |   8 +-
 .../scripts/aws/dataengine-service_install_libs.py |   2 +-
 .../scripts/aws/dataengine-service_list_libs.py    |   2 +-
 .../scripts/aws/dataengine-service_prepare.py      |  10 +-
 .../general/scripts/aws/dataengine_configure.py    |  26 +--
 .../src/general/scripts/aws/dataengine_prepare.py  |   4 +-
 .../src/general/scripts/aws/dataengine_start.py    |   2 +-
 .../general/scripts/aws/deeplearning_configure.py  |  16 +-
 .../src/general/scripts/aws/edge_configure.py      |  14 +-
 .../src/general/scripts/aws/edge_status.py         |   2 +-
 .../src/general/scripts/aws/jupyter_configure.py   |  18 +-
 .../jupyter_dataengine-service_create_configs.py   |  80 ++++----
 .../jupyter_install_dataengine-service_kernels.py  |   2 +-
 .../general/scripts/aws/jupyterlab_configure.py    |  22 +--
 .../src/general/scripts/aws/odahu_deploy.py        |  10 +-
 .../src/general/scripts/aws/odahu_prepare.py       |   2 +-
 .../src/general/scripts/aws/project_prepare.py     |  28 +--
 .../src/general/scripts/aws/rstudio_configure.py   |  16 +-
 .../rstudio_dataengine-service_create_configs.py   |  38 ++--
 .../src/general/scripts/aws/ssn_configure.py       |  14 +-
 .../src/general/scripts/aws/ssn_prepare.py         |  18 +-
 .../src/general/scripts/aws/ssn_terminate.py       |   2 +-
 .../scripts/aws/tensor-rstudio_configure.py        |  16 +-
 .../src/general/scripts/aws/tensor_configure.py    |  16 +-
 .../src/general/scripts/aws/zeppelin_configure.py  |  16 +-
 .../zeppelin_dataengine-service_create_configs.py  |  18 +-
 .../scripts/azure/common_create_notebook_image.py  |   4 +-
 .../azure/common_notebook_configure_dataengine.py  |   4 +-
 .../scripts/azure/common_prepare_notebook.py       |   2 +-
 .../general/scripts/azure/common_reupload_key.py   |   2 +-
 .../general/scripts/azure/common_start_notebook.py |   4 +-
 .../general/scripts/azure/dataengine_configure.py  |  26 +--
 .../general/scripts/azure/dataengine_prepare.py    |   4 +-
 .../src/general/scripts/azure/dataengine_start.py  |   2 +-
 .../scripts/azure/deeplearning_configure.py        |  20 +-
 .../src/general/scripts/azure/edge_configure.py    |  12 +-
 .../src/general/scripts/azure/edge_prepare.py      |  16 +-
 .../src/general/scripts/azure/edge_status.py       |   2 +-
 .../src/general/scripts/azure/jupyter_configure.py |  22 +--
 .../general/scripts/azure/jupyterlab_configure.py  |  24 +--
 .../src/general/scripts/azure/project_prepare.py   |  22 +--
 .../src/general/scripts/azure/rstudio_configure.py |  20 +-
 .../src/general/scripts/azure/ssn_configure.py     |  12 +-
 .../src/general/scripts/azure/ssn_prepare.py       |  16 +-
 .../src/general/scripts/azure/tensor_configure.py  |  20 +-
 .../general/scripts/azure/zeppelin_configure.py    |  20 +-
 ...common_notebook_configure_dataengine-service.py |   4 +-
 .../gcp/common_notebook_configure_dataengine.py    |   4 +-
 .../general/scripts/gcp/common_prepare_notebook.py |   2 +-
 .../src/general/scripts/gcp/common_reupload_key.py |   2 +-
 .../general/scripts/gcp/common_start_notebook.py   |   4 +-
 .../scripts/gcp/dataengine-service_configure.py    |   4 +-
 .../scripts/gcp/dataengine-service_install_libs.py |   2 +-
 .../scripts/gcp/dataengine-service_list_libs.py    |   2 +-
 .../scripts/gcp/dataengine-service_prepare.py      |  10 +-
 .../general/scripts/gcp/dataengine_configure.py    |  22 +--
 .../src/general/scripts/gcp/dataengine_prepare.py  |   4 +-
 .../src/general/scripts/gcp/dataengine_start.py    |   2 +-
 .../general/scripts/gcp/deeplearning_configure.py  |  16 +-
 .../src/general/scripts/gcp/edge_configure.py      |  14 +-
 .../src/general/scripts/gcp/edge_status.py         |   2 +-
 .../src/general/scripts/gcp/jupyter_configure.py   |  16 +-
 .../jupyter_dataengine-service_create_configs.py   |  38 ++--
 .../general/scripts/gcp/jupyterlab_configure.py    |  20 +-
 .../src/general/scripts/gcp/project_prepare.py     |  22 +--
 .../src/general/scripts/gcp/rstudio_configure.py   |  16 +-
 .../rstudio_dataengine-service_create_configs.py   |  34 ++--
 .../rstudio_install_dataengine-service_kernels.py  |   2 +-
 .../src/general/scripts/gcp/ssn_configure.py       |  12 +-
 .../src/general/scripts/gcp/ssn_prepare.py         |  12 +-
 .../src/general/scripts/gcp/ssn_terminate.py       |   2 +-
 .../src/general/scripts/gcp/superset_configure.py  |  18 +-
 .../scripts/gcp/tensor-rstudio_configure.py        |  16 +-
 .../src/general/scripts/gcp/tensor_configure.py    |  16 +-
 .../src/general/scripts/gcp/zeppelin_configure.py  |  16 +-
 .../zeppelin_dataengine-service_create_configs.py  |  18 +-
 .../general/scripts/os/dataengine_install_libs.py  |   4 +-
 .../src/general/scripts/os/dataengine_list_libs.py |   2 +-
 .../scripts/os/dataengine_reconfigure_spark.py     |   6 +-
 .../os/deeplearning_dataengine_create_configs.py   |  42 ++---
 .../os/jupyter_dataengine_create_configs.py        |  70 +++----
 .../src/general/scripts/os/notebook_git_creds.py   |   4 +-
 .../scripts/os/notebook_inactivity_check.py        |   2 +-
 .../general/scripts/os/notebook_install_libs.py    |   2 +-
 .../src/general/scripts/os/notebook_list_libs.py   |   2 +-
 .../scripts/os/notebook_reconfigure_spark.py       |   2 +-
 .../os/rstudio_dataengine_create_configs.py        |  34 ++--
 .../os/tensor-rstudio_dataengine_create_configs.py |  34 ++--
 .../scripts/os/tensor_dataengine_create_configs.py |  42 ++---
 .../os/zeppelin_dataengine_create_configs.py       |  74 ++++----
 infrastructure-provisioning/src/jupyter/fabfile.py |  28 +--
 .../src/jupyterlab/fabfile.py                      |  26 +--
 infrastructure-provisioning/src/project/fabfile.py |  10 +-
 .../src/project/scripts/reupload_ssh_key.py        |   2 +-
 infrastructure-provisioning/src/rstudio/fabfile.py |  28 +--
 infrastructure-provisioning/src/ssn/fabfile.py     |   6 +-
 .../src/ssn/scripts/backup.py                      |  46 ++---
 .../src/ssn/scripts/configure_gitlab.py            |  42 ++---
 .../src/ssn/scripts/configure_ui.py                |  20 +-
 .../src/ssn/scripts/docker_build.py                |  12 +-
 .../src/ssn/scripts/gitlab_deploy.py               |  10 +-
 .../src/ssn/scripts/restore.py                     |  36 ++--
 .../src/superset/fabfile.py                        |  18 +-
 .../src/tensor-rstudio/fabfile.py                  |  26 +--
 infrastructure-provisioning/src/tensor/fabfile.py  |  26 +--
 .../src/zeppelin/fabfile.py                        |  28 +--
 integration-tests/examples/copy_files.py           |   6 +-
 .../scenario_deeplearning/deeplearning_tests.py    |  18 +-
 .../examples/scenario_jupyter/jupyter_tests.py     |   4 +-
 .../examples/scenario_rstudio/rstudio_tests.py     |  12 +-
 .../examples/scenario_tensor/tensor_tests.py       |  18 +-
 .../examples/scenario_zeppelin/zeppelin_tests.py   |  10 +-
 152 files changed, 1276 insertions(+), 1276 deletions(-)

diff --git a/infrastructure-provisioning/scripts/deploy_datalab.py b/infrastructure-provisioning/scripts/deploy_datalab.py
index 89c130c..d1096d4 100644
--- a/infrastructure-provisioning/scripts/deploy_datalab.py
+++ b/infrastructure-provisioning/scripts/deploy_datalab.py
@@ -201,21 +201,21 @@ def build_docker_images(args):
     # Building base and ssn docker images
     subprocess.run('cd {2}; sudo docker build --build-arg OS={0} --build-arg SRC_PATH="infrastructure-provisioning/src/" --file '
               'infrastructure-provisioning/src/general/files/{1}/'
-              'base_Dockerfile -t docker.datalab-base .'.format(args.conf_os_family, args.conf_cloud_provider, args.workspace_path), shell=True)
+              'base_Dockerfile -t docker.datalab-base .'.format(args.conf_os_family, args.conf_cloud_provider, args.workspace_path), shell=True, check=True)
     subprocess.run('cd {2}; sudo docker build --build-arg OS={0} --file infrastructure-provisioning/src/general/files/{1}/'
-              'ssn_Dockerfile -t docker.datalab-ssn .'.format(args.conf_os_family, args.conf_cloud_provider, args.workspace_path), shell=True)
+              'ssn_Dockerfile -t docker.datalab-ssn .'.format(args.conf_os_family, args.conf_cloud_provider, args.workspace_path), shell=True, check=True)
 
 
 def deploy_datalab(args):
     # Creating SSN node
     docker_command = generate_docker_command()
-    subprocess.run(docker_command, shell=True)
+    subprocess.run(docker_command, shell=True, check=True)
 
 
 def terminate_datalab(args):
     # Dropping datalab environment with selected infrastructure tag
     docker_command = generate_docker_command()
-    subprocess.run(docker_command, shell=True)
+    subprocess.run(docker_command, shell=True, check=True)
 
 
 if __name__ == "__main__":
diff --git a/infrastructure-provisioning/scripts/deploy_keycloak/deploy_keycloak.py b/infrastructure-provisioning/scripts/deploy_keycloak/deploy_keycloak.py
index 76a11d9..74ab0a0 100644
--- a/infrastructure-provisioning/scripts/deploy_keycloak/deploy_keycloak.py
+++ b/infrastructure-provisioning/scripts/deploy_keycloak/deploy_keycloak.py
@@ -85,8 +85,8 @@ def configure_nginx():
     conn.sudo("systemctl restart nginx")
 
 if __name__ == "__main__":
-    subprocess.run("sudo mkdir /logs/keycloak -p", shell=True)
-    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /logs/keycloak', shell=True)
+    subprocess.run("sudo mkdir /logs/keycloak -p", shell=True, check=True)
+    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /logs/keycloak', shell=True, check=True)
     local_log_filename = "keycloak_deployment_script.log"
     local_log_filepath = "/logs/keycloak/" + local_log_filename
     logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
diff --git a/infrastructure-provisioning/scripts/deploy_repository/scripts/update_amazon_repositories.py b/infrastructure-provisioning/scripts/deploy_repository/scripts/update_amazon_repositories.py
index f3ff6f8..28c4ba3 100644
--- a/infrastructure-provisioning/scripts/deploy_repository/scripts/update_amazon_repositories.py
+++ b/infrastructure-provisioning/scripts/deploy_repository/scripts/update_amazon_repositories.py
@@ -32,22 +32,22 @@ args = parser.parse_args()
 
 if __name__ == "__main__":
     nexus_password = 'NEXUS_PASSWORD'
-    subprocess.run('wget http://repo.{}.amazonaws.com/2017.09/main/mirror.list -O /tmp/main_mirror.list'.format(args.region), shell=True)
+    subprocess.run('wget http://repo.{}.amazonaws.com/2017.09/main/mirror.list -O /tmp/main_mirror.list'.format(args.region), shell=True, check=True)
     subprocess.run('wget http://repo.{}.amazonaws.com/2017.09/updates/mirror.list -O /tmp/updates_mirror.list'.format(
-        args.region), shell=True)
+        args.region), shell=True, check=True)
     amazon_main_repo = subprocess.run("cat /tmp/main_mirror.list  | grep {} | sed 's/$basearch//g'".format(args.region),
-                             capture_output=True, shell=True)
+                             capture_output=True, shell=True, check=True)
     amazon_updates_repo = subprocess.run("cat /tmp/updates_mirror.list  | grep {} | sed 's/$basearch//g'".format(args.region),
-                                capture_output=True, shell=True)
-    subprocess.run('cp -f /opt/nexus/updateRepositories.groovy /tmp/updateRepositories.groovy', shell=True)
-    subprocess.run('sed -i "s|AMAZON_MAIN_URL|{}|g" /tmp/updateRepositories.groovy'.format(amazon_main_repo), shell=True)
-    subprocess.run('sed -i "s|AMAZON_UPDATES_URL|{}|g" /tmp/updateRepositories.groovy'.format(amazon_updates_repo), shell=True)
+                                capture_output=True, shell=True, check=True)
+    subprocess.run('cp -f /opt/nexus/updateRepositories.groovy /tmp/updateRepositories.groovy', shell=True, check=True)
+    subprocess.run('sed -i "s|AMAZON_MAIN_URL|{}|g" /tmp/updateRepositories.groovy'.format(amazon_main_repo), shell=True, check=True)
+    subprocess.run('sed -i "s|AMAZON_UPDATES_URL|{}|g" /tmp/updateRepositories.groovy'.format(amazon_updates_repo), shell=True, check=True)
     subprocess.run('/usr/local/groovy/latest/bin/groovy /tmp/addUpdateScript.groovy -u "admin" -p "{}" '
           '-n "updateRepositories" -f "/tmp/updateRepositories.groovy" -h "http://localhost:8081"'.format(
-           nexus_password), shell=True)
+           nexus_password), shell=True, check=True)
     subprocess.run('curl -u admin:{} -X POST --header \'Content-Type: text/plain\' '
-          'http://localhost:8081/service/rest/v1/script/updateRepositories/run'.format(nexus_password), shell=True)
-    subprocess.run('rm -f /tmp/main_mirror.list', shell=True)
-    subprocess.run('rm -f /tmp/updates_mirror.list', shell=True)
-    subprocess.run('rm -f /tmp/updateRepositories.groovy', shell=True)
+          'http://localhost:8081/service/rest/v1/script/updateRepositories/run'.format(nexus_password), shell=True, check=True)
+    subprocess.run('rm -f /tmp/main_mirror.list', shell=True, check=True)
+    subprocess.run('rm -f /tmp/updates_mirror.list', shell=True, check=True)
+    subprocess.run('rm -f /tmp/updateRepositories.groovy', shell=True, check=True)
     print('Amazon repositories have been successfully updated!')
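
Several hunks above also pass capture_output=True together with check=True.
Note that run() returns a CompletedProcess object, not the command's output;
the captured text lives in its .stdout attribute. A minimal sketch of reading
output from a checked run, assuming text=True for a decoded string
(illustrative only):

    import subprocess

    # capture_output=True collects stdout/stderr on the CompletedProcess;
    # text=True (an assumption here, for readability) decodes bytes to str.
    proc = subprocess.run('echo hello', capture_output=True, shell=True,
                          check=True, text=True)
    output = proc.stdout.strip()  # 'hello' -- the object itself is not the text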
diff --git a/infrastructure-provisioning/scripts/post-deployment_configuration.py b/infrastructure-provisioning/scripts/post-deployment_configuration.py
index 1e647a3..c932701 100644
--- a/infrastructure-provisioning/scripts/post-deployment_configuration.py
+++ b/infrastructure-provisioning/scripts/post-deployment_configuration.py
@@ -53,140 +53,140 @@ if __name__ == "__main__":
     datalab_region = '-'.join(datalab_zone.split('-', 2)[:2])
     deployment_vpcId = subprocess.run(
         "sudo gcloud compute instances describe {0} --zone {1} --format 'value(networkInterfaces.network)' | sed 's|.*/||'".format(
-            datalab_sbn, datalab_zone), capture_output=True, shell=True)
+            datalab_sbn, datalab_zone), capture_output=True, shell=True, check=True)
     deployment_subnetId = subprocess.run(
         "sudo gcloud compute instances describe {0} --zone {1} --format 'value(networkInterfaces.subnetwork)' | sed 's|.*/||'".format(
-            datalab_sbn, datalab_zone), capture_output=True, shell=True)
+            datalab_sbn, datalab_zone), capture_output=True, shell=True, check=True)
     gcp_projectId = requests.get('http://metadata/computeMetadata/v1/project/project-id', headers=headers).text
     keycloak_redirectUri = 'http://{}'.format(server_external_ip)
 
     print("Generationg SSH keyfile for datalab-user")
     key = RSA.generate(2048)
-    subprocess.run("sudo sh -c 'echo \"{}\" > /home/datalab-user/keys/KEY-FILE.pem'".format(key.exportKey('PEM')), shell=True)
-    subprocess.run("sudo chmod 600 /home/datalab-user/keys/KEY-FILE.pem", shell=True)
+    subprocess.run("sudo sh -c 'echo \"{}\" > /home/datalab-user/keys/KEY-FILE.pem'".format(key.exportKey('PEM')), shell=True, check=True)
+    subprocess.run("sudo chmod 600 /home/datalab-user/keys/KEY-FILE.pem", shell=True, check=True)
     pubkey = key.publickey()
-    subprocess.run("sudo sh -c 'echo \"{}\" > /home/datalab-user/.ssh/authorized_keys'".format(pubkey.exportKey('OpenSSH')), shell=True)
+    subprocess.run("sudo sh -c 'echo \"{}\" > /home/datalab-user/.ssh/authorized_keys'".format(pubkey.exportKey('OpenSSH')), shell=True, check=True)
 
     print("Generationg MongoDB password")
     mongo_pwd = uuid.uuid4().hex
     try:
         subprocess.run(
             "sudo echo -e 'db.changeUserPassword(\"admin\", \"{}\")' | mongo datalabdb --port 27017 -u admin -p MONGO_PASSWORD".format(
-                mongo_pwd), shell=True)
-        subprocess.run('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/datalab/conf/billing.yml'.format(mongo_pwd), shell=True)
+                mongo_pwd), shell=True, check=True)
+        subprocess.run('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/datalab/conf/billing.yml'.format(mongo_pwd), shell=True, check=True)
 
-        subprocess.run('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/datalab/conf/ssn.yml'.format(mongo_pwd), shell=True)
+        subprocess.run('sudo sed -i "s|MONGO_PASSWORD|{}|g" /opt/datalab/conf/ssn.yml'.format(mongo_pwd), shell=True, check=True)
     except:
         print('Mongo password was already changed')
 
     print('Reserving external IP')
     static_address_exist = subprocess.run(
-        "sudo gcloud compute addresses list --filter='address={}'".format(server_external_ip), capture_output=True, shell=True)
+        "sudo gcloud compute addresses list --filter='address={}'".format(server_external_ip), capture_output=True, shell=True, check=True)
     if static_address_exist:
         print('Address is already static')
     else:
         subprocess.run("sudo gcloud compute addresses create {0} --addresses {1} --region {2}".format(datalab_ssn_static_ip_name,
                                                                                              server_external_ip,
                                                                                              datalab_region),
-              capture_output=True, shell=True)
+              capture_output=True, shell=True, check=True)
 
     print("Overwriting SSN parameters")
 
     if deployment_subnetId == 'default':
         subprocess.run(
-            'sudo sed -i "s|# user_subnets_range|user_subnets_range|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini', shell=True)
+            'sudo sed -i "s|# user_subnets_range|user_subnets_range|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini', shell=True, check=True)
 
-    subprocess.run('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/self-service.yml'.format(datalab_sbn), shell=True)
-    subprocess.run('sudo sed -i "s|KEYCLOAK_REDIRECTURI|{}|g" /opt/datalab/conf/self-service.yml'.format(keycloak_redirectUri), shell=True)
+    subprocess.run('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/self-service.yml'.format(datalab_sbn), shell=True, check=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_REDIRECTURI|{}|g" /opt/datalab/conf/self-service.yml'.format(keycloak_redirectUri), shell=True, check=True)
     subprocess.run(
-        'sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/conf/self-service.yml'.format(args.keycloak_realm_name), shell=True)
+        'sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/conf/self-service.yml'.format(args.keycloak_realm_name), shell=True, check=True)
     subprocess.run('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/conf/self-service.yml'.format(
-        args.keycloak_auth_server_url), shell=True)
+        args.keycloak_auth_server_url), shell=True, check=True)
     subprocess.run('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/conf/self-service.yml'.format(
-        args.keycloak_client_name), shell=True)
+        args.keycloak_client_name), shell=True, check=True)
     subprocess.run('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/conf/self-service.yml'.format(
-        args.keycloak_client_secret), shell=True)
+        args.keycloak_client_secret), shell=True, check=True)
 
     subprocess.run(
-        'sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_realm_name), shell=True)
+        'sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_realm_name), shell=True, check=True)
     subprocess.run('sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/conf/provisioning.yml'.format(
-        args.keycloak_auth_server_url), shell=True)
+        args.keycloak_auth_server_url), shell=True, check=True)
     subprocess.run('sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/conf/provisioning.yml'.format(
-        args.keycloak_client_name), shell=True)
+        args.keycloak_client_name), shell=True, check=True)
     subprocess.run('sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/conf/provisioning.yml'.format(
-        args.keycloak_client_secret), shell=True)
-    subprocess.run('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_sbn), shell=True)
-    subprocess.run('sudo sed -i "s|SUBNET_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(deployment_subnetId), shell=True)
-    subprocess.run('sudo sed -i "s|DATALAB_REGION|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_region), shell=True)
-    subprocess.run('sudo sed -i "s|DATALAB_ZONE|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_zone), shell=True)
-    subprocess.run('sudo sed -i "s|SSN_VPC_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(deployment_vpcId), shell=True)
-    subprocess.run('sudo sed -i "s|GCP_PROJECT_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(gcp_projectId), shell=True)
-    subprocess.run('sudo sed -i "s|KEYCLOAK_USER|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_user), shell=True)
+        args.keycloak_client_secret), shell=True, check=True)
+    subprocess.run('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_sbn), shell=True, check=True)
+    subprocess.run('sudo sed -i "s|SUBNET_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(deployment_subnetId), shell=True, check=True)
+    subprocess.run('sudo sed -i "s|DATALAB_REGION|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_region), shell=True, check=True)
+    subprocess.run('sudo sed -i "s|DATALAB_ZONE|{}|g" /opt/datalab/conf/provisioning.yml'.format(datalab_zone), shell=True, check=True)
+    subprocess.run('sudo sed -i "s|SSN_VPC_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(deployment_vpcId), shell=True, check=True)
+    subprocess.run('sudo sed -i "s|GCP_PROJECT_ID|{}|g" /opt/datalab/conf/provisioning.yml'.format(gcp_projectId), shell=True, check=True)
+    subprocess.run('sudo sed -i "s|KEYCLOAK_USER|{}|g" /opt/datalab/conf/provisioning.yml'.format(args.keycloak_user), shell=True, check=True)
     subprocess.run('sudo sed -i "s|KEYCLOAK_ADMIN_PASSWORD|{}|g" /opt/datalab/conf/provisioning.yml'.format(
-        args.keycloak_admin_password), shell=True)
+        args.keycloak_admin_password), shell=True, check=True)
 
-    subprocess.run('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/billing.yml'.format(datalab_sbn), shell=True)
+    subprocess.run('sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/conf/billing.yml'.format(datalab_sbn), shell=True, check=True)
 
     subprocess.run(
         'sudo sed -i "s|DATALAB_SBN|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            datalab_sbn), shell=True)
+            datalab_sbn), shell=True, check=True)
     subprocess.run(
         'sudo sed -i "s|GCP_PROJECT_ID|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            gcp_projectId), shell=True)
+            gcp_projectId), shell=True, check=True)
     subprocess.run(
         'sudo sed -i "s|DATALAB_REGION|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            datalab_region), shell=True)
+            datalab_region), shell=True, check=True)
     subprocess.run(
         'sudo sed -i "s|DATALAB_ZONE|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            datalab_zone), shell=True)
+            datalab_zone), shell=True, check=True)
     subprocess.run(
         'sudo sed -i "s|KEYCLOAK_REALM_NAME|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_realm_name), shell=True)
+            args.keycloak_realm_name), shell=True, check=True)
     subprocess.run(
         'sudo sed -i "s|KEYCLOAK_AUTH_SERVER_URL|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_auth_server_url), shell=True)
+            args.keycloak_auth_server_url), shell=True, check=True)
     subprocess.run(
         'sudo sed -i "s|KEYCLOAK_CLIENT_NAME|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_client_name), shell=True)
+            args.keycloak_client_name), shell=True, check=True)
     subprocess.run(
         'sudo sed -i "s|KEYCLOAK_CLIENT_SECRET|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_client_secret), shell=True)
+            args.keycloak_client_secret), shell=True, check=True)
     subprocess.run(
         'sudo sed -i "s|KEYCLOAK_USER|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_user), shell=True)
+            args.keycloak_user), shell=True, check=True)
     subprocess.run(
         'sudo sed -i "s|KEYCLOAK_ADMIN_PASSWORD|{}|g" /opt/datalab/sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(
-            args.keycloak_admin_password), shell=True)
+            args.keycloak_admin_password), shell=True, check=True)
 
     print('SSL certificate generating')
     keystore_passwd = uuid.uuid4().hex
-    subprocess.run('sudo rm /home/datalab-user/keys/ssn*', shell=True)
-    subprocess.run('sudo rm /etc/ssl/certs/datalab*', shell=True)
+    subprocess.run('sudo rm /home/datalab-user/keys/ssn*', shell=True, check=True)
+    subprocess.run('sudo rm /etc/ssl/certs/datalab*', shell=True, check=True)
     subprocess.run('sudo keytool -delete -noprompt -trustcacerts -alias ssn -storepass changeit -keystore '
-        '/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts', shell=True)
+        '/usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts', shell=True, check=True)
     subprocess.run(
         'sudo openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/certs/datalab.key -out '
         '/etc/ssl/certs/datalab.crt -subj "/C=US/ST=US/L=US/O=datalab/CN=localhost/subjectAltName={0}"'.format(
-            server_external_ip), shell=True)
+            server_external_ip), shell=True, check=True)
     subprocess.run(
         'sudo openssl pkcs12 -export -in /etc/ssl/certs/datalab.crt -inkey /etc/ssl/certs/datalab.key -name ssn -out '
-        '/home/datalab-user/keys/ssn.p12 -password pass:{0}'.format(keystore_passwd), shell=True)
+        '/home/datalab-user/keys/ssn.p12 -password pass:{0}'.format(keystore_passwd), shell=True, check=True)
     subprocess.run(
         'sudo keytool -importkeystore -srckeystore /home/datalab-user/keys/ssn.p12 -srcstoretype PKCS12 -alias '
         'ssn -destkeystore /home/datalab-user/keys/ssn.keystore.jks -deststorepass {0} -srcstorepass {0}'.format(
-            keystore_passwd), shell=True)
+            keystore_passwd), shell=True, check=True)
     subprocess.run(
         'sudo keytool -importcert -trustcacerts -alias ssn -file /etc/ssl/certs/datalab.crt -noprompt -storepass '
-        'changeit -keystore /usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts', shell=True)
-    subprocess.run('sudo sed -i "s|KEYSTORE_PASSWORD|{}|g" /opt/datalab/conf/ssn.yml'.format(keystore_passwd), shell=True)
+        'changeit -keystore /usr/lib/jvm/java-8-openjdk-amd64/jre/lib/security/cacerts', shell=True, check=True)
+    subprocess.run('sudo sed -i "s|KEYSTORE_PASSWORD|{}|g" /opt/datalab/conf/ssn.yml'.format(keystore_passwd), shell=True, check=True)
 
     print('Nginx configuration updating')
-    subprocess.run('sudo sed -i "s|SERVER_IP|{}|g" /etc/nginx/conf.d/nginx_proxy.conf'.format(server_external_ip), shell=True)
-    subprocess.run('sudo systemctl restart nginx', shell=True)
-    subprocess.run('sudo supervisorctl restart all', shell=True)
+    subprocess.run('sudo sed -i "s|SERVER_IP|{}|g" /etc/nginx/conf.d/nginx_proxy.conf'.format(server_external_ip), shell=True, check=True)
+    subprocess.run('sudo systemctl restart nginx', shell=True, check=True)
+    subprocess.run('sudo supervisorctl restart all', shell=True, check=True)
 
     print('Rebuilding docker images')
-    subprocess.run('cd /opt/datalab/sources/infrastructure-provisioning/src/ && sudo docker-build all', shell=True)
+    subprocess.run('cd /opt/datalab/sources/infrastructure-provisioning/src/ && sudo docker-build all', shell=True, check=True)
 
     print('[SUMMARY]')
     print('Mongo password stored in /opt/datalab/conf/ssn.yml')
diff --git a/infrastructure-provisioning/src/base/entrypoint.py b/infrastructure-provisioning/src/base/entrypoint.py
index d1f6917..1095c7b 100644
--- a/infrastructure-provisioning/src/base/entrypoint.py
+++ b/infrastructure-provisioning/src/base/entrypoint.py
@@ -108,7 +108,7 @@ if __name__ == "__main__":
         pass
 
     if args.action != 'terminate':
-        subprocess.run('chmod 600 /root/keys/*.pem', shell=True)
+        subprocess.run('chmod 600 /root/keys/*.pem', shell=True, check=True)
 
     if dry_run:
         with open("/response/{}.json".format(request_id), 'w') as response_file:
@@ -117,10 +117,10 @@ if __name__ == "__main__":
 
     # Run execution routines
     elif args.action == 'create':
-        subprocess.run("/bin/create.py", shell=True)
+        subprocess.run("/bin/create.py", shell=True, check=True)
 
     elif args.action == 'status':
-        subprocess.run("/bin/status.py", shell=True)
+        subprocess.run("/bin/status.py", shell=True, check=True)
 
     elif args.action == 'describe':
         with open('/root/description.json') as json_file:
@@ -130,40 +130,40 @@ if __name__ == "__main__":
                 response_file.write(json.dumps(description))
 
     elif args.action == 'stop':
-        subprocess.run("/bin/stop.py", shell=True)
+        subprocess.run("/bin/stop.py", shell=True, check=True)
 
     elif args.action == 'start':
-        subprocess.run("/bin/start.py", shell=True)
+        subprocess.run("/bin/start.py", shell=True, check=True)
 
     elif args.action == 'terminate':
-        subprocess.run("/bin/terminate.py", shell=True)
+        subprocess.run("/bin/terminate.py", shell=True, check=True)
 
     elif args.action == 'configure':
-        subprocess.run("/bin/configure.py", shell=True)
+        subprocess.run("/bin/configure.py", shell=True, check=True)
 
     elif args.action == 'recreate':
-        subprocess.run("/bin/recreate.py", shell=True)
+        subprocess.run("/bin/recreate.py", shell=True, check=True)
 
     elif args.action == 'reupload_key':
-        subprocess.run("/bin/reupload_key.py", shell=True)
+        subprocess.run("/bin/reupload_key.py", shell=True, check=True)
 
     elif args.action == 'lib_install':
-        subprocess.run("/bin/install_libs.py", shell=True)
+        subprocess.run("/bin/install_libs.py", shell=True, check=True)
 
     elif args.action == 'lib_list':
-        subprocess.run("/bin/list_libs.py", shell=True)
+        subprocess.run("/bin/list_libs.py", shell=True, check=True)
 
     elif args.action == 'git_creds':
-        subprocess.run("/bin/git_creds.py", shell=True)
+        subprocess.run("/bin/git_creds.py", shell=True, check=True)
 
     elif args.action == 'create_image':
-        subprocess.run("/bin/create_image.py", shell=True)
+        subprocess.run("/bin/create_image.py", shell=True, check=True)
 
     elif args.action == 'terminate_image':
-        subprocess.run("/bin/terminate_image.py", shell=True)
+        subprocess.run("/bin/terminate_image.py", shell=True, check=True)
 
     elif args.action == 'reconfigure_spark':
-        subprocess.run("/bin/reconfigure_spark.py", shell=True)
+        subprocess.run("/bin/reconfigure_spark.py", shell=True, check=True)
 
     elif args.action == 'check_inactivity':
-        subprocess.run("/bin/check_inactivity.py", shell=True)
\ No newline at end of file
+        subprocess.run("/bin/check_inactivity.py", shell=True, check=True)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/base/scripts/install_user_key.py b/infrastructure-provisioning/src/base/scripts/install_user_key.py
index 7f36ca4..acfb3bb 100644
--- a/infrastructure-provisioning/src/base/scripts/install_user_key.py
+++ b/infrastructure-provisioning/src/base/scripts/install_user_key.py
@@ -38,7 +38,7 @@ args = parser.parse_args()
 
 def copy_key(config):
     admin_key_pub = subprocess.run('ssh-keygen -y -f {}'.format(args.keyfile),
-                          capture_output=True, shell=True)
+                          capture_output=True, shell=True, check=True)
     conn.sudo('rm -f /home/{}/.ssh/authorized_keys'.format(args.user))
     conn.sudo('echo "{0}" >> /home/{1}/.ssh/authorized_keys'.format(admin_key_pub, args.user))
     try:
diff --git a/infrastructure-provisioning/src/dataengine-service/fabfile.py b/infrastructure-provisioning/src/dataengine-service/fabfile.py
index ecf9481..8066457 100644
--- a/infrastructure-provisioning/src/dataengine-service/fabfile.py
+++ b/infrastructure-provisioning/src/dataengine-service/fabfile.py
@@ -42,14 +42,14 @@ def run():
     dataengine_service_config = dict()
     dataengine_service_config['uuid'] = str(uuid.uuid4())[:5]
     try:
-        subprocess.run("~/scripts/{}.py --uuid {}".format('dataengine-service_prepare', dataengine_service_config['uuid']), shell=True)
+        subprocess.run("~/scripts/{}.py --uuid {}".format('dataengine-service_prepare', dataengine_service_config['uuid']), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Data Engine service.", str(err))
         sys.exit(1)
 
     try:
-        subprocess.run("~/scripts/{}.py --uuid {}".format('dataengine-service_configure', dataengine_service_config['uuid']), shell=True)
+        subprocess.run("~/scripts/{}.py --uuid {}".format('dataengine-service_configure', dataengine_service_config['uuid']), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Data Engine service.", str(err))
@@ -66,7 +66,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine-service_install_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine-service_install_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for DataEngine service.", str(err))
@@ -83,7 +83,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine-service_list_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine-service_list_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for Data Engine service.", str(err))
@@ -98,7 +98,7 @@ def terminate():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine-service_terminate'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine-service_terminate'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
diff --git a/infrastructure-provisioning/src/dataengine/fabfile.py b/infrastructure-provisioning/src/dataengine/fabfile.py
index e1f7000..7674266 100644
--- a/infrastructure-provisioning/src/dataengine/fabfile.py
+++ b/infrastructure-provisioning/src/dataengine/fabfile.py
@@ -39,14 +39,14 @@ def run():
                         level=logging.INFO,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine_prepare'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine_prepare'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Data Engine.", str(err))
         sys.exit(1)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine_configure'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine_configure'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Data Engine.", str(err))
@@ -61,7 +61,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine_start'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine_start'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Data Engine.", str(err))
@@ -78,7 +78,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine_install_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine_install_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for DataEngine.", str(err))
@@ -95,7 +95,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine_list_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine_list_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for Data Engine.", str(err))
@@ -110,7 +110,7 @@ def stop():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine_stop'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine_stop'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Data Engine.", str(err))
@@ -125,7 +125,7 @@ def terminate():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine_terminate'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine_terminate'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Data Engine.", str(err))
@@ -142,7 +142,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('dataengine_reconfigure_spark'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('dataengine_reconfigure_spark'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Data Engine.", str(err))
diff --git a/infrastructure-provisioning/src/deeplearning/fabfile.py b/infrastructure-provisioning/src/deeplearning/fabfile.py
index 3e9f2df..2616ad1 100644
--- a/infrastructure-provisioning/src/deeplearning/fabfile.py
+++ b/infrastructure-provisioning/src/deeplearning/fabfile.py
@@ -45,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -53,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('deeplearning_configure', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('deeplearning_configure', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -68,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -83,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -99,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -117,7 +117,7 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring dataengine on Notebook node.", str(err))
@@ -134,7 +134,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -151,7 +151,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -168,7 +168,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -185,7 +185,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -202,7 +202,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -219,7 +219,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
@@ -235,7 +235,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/infrastructure-provisioning/src/edge/fabfile.py b/infrastructure-provisioning/src/edge/fabfile.py
index 5e8199e..f89e85f 100644
--- a/infrastructure-provisioning/src/edge/fabfile.py
+++ b/infrastructure-provisioning/src/edge/fabfile.py
@@ -39,7 +39,7 @@ def status():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('edge_status'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('edge_status'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed obtaining EDGE status.", str(err))
@@ -54,7 +54,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('edge_stop'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('edge_stop'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Edge node.", str(err))
@@ -69,7 +69,7 @@ def start():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('edge_start'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('edge_start'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Edge node.", str(err))
diff --git a/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py b/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
index e1d5184..4f8483b 100644
--- a/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
+++ b/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
@@ -58,7 +58,7 @@ if __name__ == "__main__":
                 os.environ['conf_resource'], reupload_config['resource_id'],
                 reupload_config['os_user'],  reupload_config['keyfile'],
                 json.dumps(reupload_config['additional_config']))
-            subprocess.run("~/scripts/{}.py {}".format('common_reupload_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_reupload_key', params), shell=True, check=True)
         except Exception as err:
             traceback.print_exc()
             raise Exception
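
The hunk above still re-raises a bare Exception after printing the traceback, which drops the original cause. A hedged sketch of an alternative using exception chaining (invocation simplified, message text illustrative):

    import subprocess

    try:
        subprocess.run("~/scripts/common_reupload_key.py", shell=True, check=True)
    except subprocess.CalledProcessError as err:
        # 'from err' keeps the CalledProcessError attached as __cause__,
        # so the child's exit code survives into the final traceback.
        raise Exception('Failed to reupload SSH key') from err
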
diff --git a/infrastructure-provisioning/src/general/api/check_inactivity.py b/infrastructure-provisioning/src/general/api/check_inactivity.py
index a0764fe..fa7bff2 100644
--- a/infrastructure-provisioning/src/general/api/check_inactivity.py
+++ b/infrastructure-provisioning/src/general/api/check_inactivity.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab check_inactivity', shell=True)
+        subprocess.run('cd /root; fab check_inactivity', shell=True, check=True)
     except:
         success = False
 
@@ -57,7 +57,7 @@ if __name__ == "__main__":
         response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
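
The api/*.py wrappers in this and the following hunks all follow the same success-flag scheme, and check=True is what finally ties the flag to the fab task's exit status. A minimal sketch of that scheme, assuming an illustrative response payload:

    import json
    import subprocess

    success = True
    try:
        # A non-zero fab exit now raises CalledProcessError and flips the
        # flag; previously subprocess.run() returned normally either way.
        subprocess.run('cd /root; fab status', shell=True, check=True)
    except subprocess.CalledProcessError:
        success = False

    # The real wrappers write a richer response document; this payload is a
    # placeholder to keep the sketch self-contained.
    with open('/response/result.json', 'w') as response_file:
        response_file.write(json.dumps({'status': 'ok' if success else 'err'}))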
 
diff --git a/infrastructure-provisioning/src/general/api/configure.py b/infrastructure-provisioning/src/general/api/configure.py
index 742d54a..33d438c 100644
--- a/infrastructure-provisioning/src/general/api/configure.py
+++ b/infrastructure-provisioning/src/general/api/configure.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab configure', shell=True)
+        subprocess.run('cd /root; fab configure', shell=True, check=True)
     except:
         success = False
 
@@ -56,7 +56,7 @@ if __name__ == "__main__":
         response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/create.py b/infrastructure-provisioning/src/general/api/create.py
index 48ff4e7..62d7f35 100644
--- a/infrastructure-provisioning/src/general/api/create.py
+++ b/infrastructure-provisioning/src/general/api/create.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab run', shell=True)
+        subprocess.run('cd /root; fab run', shell=True, check=True)
     except:
         success = False
 
@@ -63,7 +63,7 @@ if __name__ == "__main__":
             response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/create_image.py b/infrastructure-provisioning/src/general/api/create_image.py
index f7aa81b..d8f8901 100644
--- a/infrastructure-provisioning/src/general/api/create_image.py
+++ b/infrastructure-provisioning/src/general/api/create_image.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab create_image', shell=True)
+        subprocess.run('cd /root; fab create_image', shell=True, check=True)
     except:
         success = False
 
@@ -57,7 +57,7 @@ if __name__ == "__main__":
         response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/git_creds.py b/infrastructure-provisioning/src/general/api/git_creds.py
index 8598401..acebee1 100644
--- a/infrastructure-provisioning/src/general/api/git_creds.py
+++ b/infrastructure-provisioning/src/general/api/git_creds.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab git_creds', shell=True)
+        subprocess.run('cd /root; fab git_creds', shell=True, check=True)
     except:
         success = False
 
@@ -57,7 +57,7 @@ if __name__ == "__main__":
         response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/install_libs.py b/infrastructure-provisioning/src/general/api/install_libs.py
index 4dc3620..921cd8f 100644
--- a/infrastructure-provisioning/src/general/api/install_libs.py
+++ b/infrastructure-provisioning/src/general/api/install_libs.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab install_libs', shell=True)
+        subprocess.run('cd /root; fab install_libs', shell=True, check=True)
     except:
         success = False
 
@@ -57,7 +57,7 @@ if __name__ == "__main__":
         response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/list_libs.py b/infrastructure-provisioning/src/general/api/list_libs.py
index df59a67..b1e80a3 100644
--- a/infrastructure-provisioning/src/general/api/list_libs.py
+++ b/infrastructure-provisioning/src/general/api/list_libs.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab list_libs', shell=True)
+        subprocess.run('cd /root; fab list_libs', shell=True, check=True)
     except:
         success = False
 
@@ -72,7 +72,7 @@ if __name__ == "__main__":
         success = False
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/reconfigure_spark.py b/infrastructure-provisioning/src/general/api/reconfigure_spark.py
index d9f340a..82dc4f2 100644
--- a/infrastructure-provisioning/src/general/api/reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/api/reconfigure_spark.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab reconfigure_spark', shell=True)
+        subprocess.run('cd /root; fab reconfigure_spark', shell=True, check=True)
     except:
         success = False
 
@@ -57,7 +57,7 @@ if __name__ == "__main__":
         response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/recreate.py b/infrastructure-provisioning/src/general/api/recreate.py
index bee479c..8f881e2 100644
--- a/infrastructure-provisioning/src/general/api/recreate.py
+++ b/infrastructure-provisioning/src/general/api/recreate.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab recreate', shell=True)
+        subprocess.run('cd /root; fab recreate', shell=True, check=True)
     except:
         success = False
 
@@ -63,7 +63,7 @@ if __name__ == "__main__":
             response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/reupload_key.py b/infrastructure-provisioning/src/general/api/reupload_key.py
index 0c90c87..4a423ab 100644
--- a/infrastructure-provisioning/src/general/api/reupload_key.py
+++ b/infrastructure-provisioning/src/general/api/reupload_key.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab reupload_key', shell=True)
+        subprocess.run('cd /root; fab reupload_key', shell=True, check=True)
     except:
         success = False
 
@@ -59,7 +59,7 @@ if __name__ == "__main__":
     except:
         print('Cannot write to response')
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/start.py b/infrastructure-provisioning/src/general/api/start.py
index 8496879..8b2d97e 100644
--- a/infrastructure-provisioning/src/general/api/start.py
+++ b/infrastructure-provisioning/src/general/api/start.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab start', shell=True)
+        subprocess.run('cd /root; fab start', shell=True, check=True)
     except:
         success = False
 
@@ -56,7 +56,7 @@ if __name__ == "__main__":
         response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/status.py b/infrastructure-provisioning/src/general/api/status.py
index f491c54..9805bab 100644
--- a/infrastructure-provisioning/src/general/api/status.py
+++ b/infrastructure-provisioning/src/general/api/status.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab status', shell=True)
+        subprocess.run('cd /root; fab status', shell=True, check=True)
     except:
         success = False
 
@@ -56,7 +56,7 @@ if __name__ == "__main__":
         response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/stop.py b/infrastructure-provisioning/src/general/api/stop.py
index 471581f..71f7dae 100644
--- a/infrastructure-provisioning/src/general/api/stop.py
+++ b/infrastructure-provisioning/src/general/api/stop.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab stop', shell=True)
+        subprocess.run('cd /root; fab stop', shell=True, check=True)
     except:
         success = False
 
@@ -57,7 +57,7 @@ if __name__ == "__main__":
         response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/terminate.py b/infrastructure-provisioning/src/general/api/terminate.py
index a4d1367..12904f8 100644
--- a/infrastructure-provisioning/src/general/api/terminate.py
+++ b/infrastructure-provisioning/src/general/api/terminate.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab terminate', shell=True)
+        subprocess.run('cd /root; fab terminate', shell=True, check=True)
     except:
         success = False
 
@@ -63,7 +63,7 @@ if __name__ == "__main__":
             response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/api/terminate_image.py b/infrastructure-provisioning/src/general/api/terminate_image.py
index 99a5164..c1427be 100644
--- a/infrastructure-provisioning/src/general/api/terminate_image.py
+++ b/infrastructure-provisioning/src/general/api/terminate_image.py
@@ -29,7 +29,7 @@ import subprocess
 if __name__ == "__main__":
     success = True
     try:
-        subprocess.run('cd /root; fab terminate_image', shell=True)
+        subprocess.run('cd /root; fab terminate_image', shell=True, check=True)
     except:
         success = False
 
@@ -57,7 +57,7 @@ if __name__ == "__main__":
         response_file.write(json.dumps(reply))
 
     try:
-        subprocess.run('chmod 666 /response/*', shell=True)
+        subprocess.run('chmod 666 /response/*', shell=True, check=True)
     except:
         success = False
 
diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index b92f95b..2132c74 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -1392,7 +1392,7 @@ def create_image_from_instance(tag_name='', instance_name='', image_name='', tag
                                           NoReboot=False)
             image.load()
             while image.state != 'available':
-                subprocess.run("echo Waiting for image creation; sleep 20", shell=True)
+                subprocess.run("echo Waiting for image creation; sleep 20", shell=True, check=True)
                 image.load()
             tag = {'Key': 'Name', 'Value': image_name}
             sbn_tag = {'Key': 'SBN', 'Value': os.environ['conf_service_base_name']}
@@ -1428,14 +1428,14 @@ def install_emr_spark(args):
                             '/tmp/spark.tar.gz')
     s3_client.download_file(args.bucket, args.project_name + '/' + args.cluster_name + '/spark-checksum.chk',
                             '/tmp/spark-checksum.chk')
-    if 'WARNING' in subprocess.run('md5sum -c /tmp/spark-checksum.chk', capture_output=True, shell=True):
-        subprocess.run('rm -f /tmp/spark.tar.gz', shell=True)
+    if 'WARNING' in subprocess.run('md5sum -c /tmp/spark-checksum.chk 2>&1', capture_output=True, shell=True, text=True).stdout:
+        subprocess.run('rm -f /tmp/spark.tar.gz', shell=True, check=True)
         s3_client.download_file(args.bucket, args.project_name + '/' + args.cluster_name + '/spark.tar.gz',
                                 '/tmp/spark.tar.gz')
-        if 'WARNING' in subprocess.run('md5sum -c /tmp/spark-checksum.chk', capture_output=True, shell=True):
+        if 'WARNING' in subprocess.run('md5sum -c /tmp/spark-checksum.chk 2>&1', capture_output=True, shell=True, text=True).stdout:
             print("The checksum of spark.tar.gz is mismatched. It could be caused by aws network issue.")
             sys.exit(1)
-    subprocess.run('sudo tar -zhxvf /tmp/spark.tar.gz -C /opt/' + args.emr_version + '/' + args.cluster_name + '/', shell=True)
+    subprocess.run('sudo tar -zhxvf /tmp/spark.tar.gz -C /opt/' + args.emr_version + '/' + args.cluster_name + '/', shell=True, check=True)
 
 
 def jars(args, emr_dir):
@@ -1443,13 +1443,13 @@ def jars(args, emr_dir):
     s3_client = boto3.client('s3', config=Config(signature_version='s3v4'), region_name=args.region)
     s3_client.download_file(args.bucket, 'jars/' + args.emr_version + '/jars.tar.gz', '/tmp/jars.tar.gz')
     s3_client.download_file(args.bucket, 'jars/' + args.emr_version + '/jars-checksum.chk', '/tmp/jars-checksum.chk')
-    if 'WARNING' in subprocess.run('md5sum -c /tmp/jars-checksum.chk', capture_output=True, shell=True):
-        subprocess.run('rm -f /tmp/jars.tar.gz', shell=True)
+    if 'WARNING' in subprocess.run('md5sum -c /tmp/jars-checksum.chk 2>&1', capture_output=True, shell=True, text=True).stdout:
+        subprocess.run('rm -f /tmp/jars.tar.gz', shell=True, check=True)
         s3_client.download_file(args.bucket, 'jars/' + args.emr_version + '/jars.tar.gz', '/tmp/jars.tar.gz')
-        if 'WARNING' in subprocess.run('md5sum -c /tmp/jars-checksum.chk', capture_output=True, shell=True):
+        if 'WARNING' in subprocess.run('md5sum -c /tmp/jars-checksum.chk 2>&1', capture_output=True, shell=True, text=True).stdout:
             print("The checksum of jars.tar.gz is mismatched. It could be caused by aws network issue.")
             sys.exit(1)
-    subprocess.run('tar -zhxvf /tmp/jars.tar.gz -C ' + emr_dir, shell=True)
+    subprocess.run('tar -zhxvf /tmp/jars.tar.gz -C ' + emr_dir, shell=True, check=True)
 
 
 def yarn(args, yarn_dir):
@@ -1463,8 +1463,8 @@ def yarn(args, yarn_dir):
         s3client = boto3.client('s3', config=Config(signature_version='s3v4'), region_name=args.region)
         s3resource = boto3.resource('s3', config=Config(signature_version='s3v4'))
     get_files(s3client, s3resource, args.project_name + '/' + args.cluster_name + '/config/', args.bucket, yarn_dir)
-    subprocess.run('sudo mv ' + yarn_dir + args.project_name + '/' + args.cluster_name + '/config/* ' + yarn_dir, shell=True)
-    subprocess.run('sudo rm -rf ' + yarn_dir + args.project_name + '/', shell=True)
+    subprocess.run('sudo mv ' + yarn_dir + args.project_name + '/' + args.cluster_name + '/config/* ' + yarn_dir, shell=True, check=True)
+    subprocess.run('sudo rm -rf ' + yarn_dir + args.project_name + '/', shell=True, check=True)
 
 
 def get_files(s3client, s3resource, dist, bucket, local):
@@ -1499,8 +1499,8 @@ def get_gitlab_cert(bucket, certfile):
 def create_aws_config_files(generate_full_config=False):
     try:
         aws_user_dir = os.environ['AWS_DIR']
-        logging.info(subprocess.run("rm -rf " + aws_user_dir + " 2>&1", capture_output=True, shell=True))
-        logging.info(subprocess.run("mkdir -p " + aws_user_dir + " 2>&1", capture_output=True, shell=True))
+        logging.info(subprocess.run("rm -rf " + aws_user_dir + " 2>&1", capture_output=True, shell=True, check=True))
+        logging.info(subprocess.run("mkdir -p " + aws_user_dir + " 2>&1", capture_output=True, shell=True, check=True))
 
         with open(aws_user_dir + '/config', 'w') as aws_file:
             aws_file.write("[default]\n")
@@ -1512,8 +1512,8 @@ def create_aws_config_files(generate_full_config=False):
                 aws_file.write("aws_access_key_id = {}\n".format(os.environ['aws_access_key']))
                 aws_file.write("aws_secret_access_key = {}\n".format(os.environ['aws_secret_access_key']))
 
-        logging.info(subprocess.run("chmod 600 " + aws_user_dir + "/*" + " 2>&1", capture_output=True, shell=True))
-        logging.info(subprocess.run("chmod 550 " + aws_user_dir + " 2>&1", capture_output=True, shell=True))
+        logging.info(subprocess.run("chmod 600 " + aws_user_dir + "/*" + " 2>&1", capture_output=True, shell=True, check=True))
+        logging.info(subprocess.run("chmod 550 " + aws_user_dir + " 2>&1", capture_output=True, shell=True, check=True))
 
         return True
     except Exception as err:
@@ -1528,82 +1528,82 @@ def installing_python(region, bucket, user_name, cluster_name, application='', p
     python_version = python_version[0:5]
     if not os.path.exists('/opt/python/python' + python_version):
         subprocess.run('wget https://www.python.org/ftp/python/' + python_version +
-              '/Python-' + python_version + '.tgz -O /tmp/Python-' + python_version + '.tgz', shell=True)
-        subprocess.run('tar zxvf /tmp/Python-' + python_version + '.tgz -C /tmp/', shell=True)
-        subprocess.run('cd /tmp/Python-{0}; ./configure --prefix=/opt/python/python{0} --with-zlib-dir=/usr/local/lib/ --with-ensurepip=install'.format(python_version), shell=True)
-        subprocess.run('cd /tmp/Python-{0}; sudo make altinstall'.format(python_version), shell=True)
-        subprocess.run('cd /tmp/; sudo rm -rf Python-' + python_version + '/', shell=True)
+              '/Python-' + python_version + '.tgz -O /tmp/Python-' + python_version + '.tgz', shell=True, check=True)
+        subprocess.run('tar zxvf /tmp/Python-' + python_version + '.tgz -C /tmp/', shell=True, check=True)
+        subprocess.run('cd /tmp/Python-{0}; ./configure --prefix=/opt/python/python{0} --with-zlib-dir=/usr/local/lib/ --with-ensurepip=install'.format(python_version), shell=True, check=True)
+        subprocess.run('cd /tmp/Python-{0}; sudo make altinstall'.format(python_version), shell=True, check=True)
+        subprocess.run('cd /tmp/; sudo rm -rf Python-' + python_version + '/', shell=True, check=True)
         if region == 'cn-north-1':
             subprocess.run('sudo -i /opt/python/python{}/bin/python{} -m pip install -U pip=={} --no-cache-dir'.format(
-                python_version, python_version[0:3], os.environ['conf_pip_version']), shell=True)
-            subprocess.run('sudo mv /etc/pip.conf /etc/back_pip.conf', shell=True)
-            subprocess.run('sudo touch /etc/pip.conf', shell=True)
-            subprocess.run('sudo echo "[global]" >> /etc/pip.conf', shell=True)
-            subprocess.run('sudo echo "timeout = 600" >> /etc/pip.conf', shell=True)
-        subprocess.run('sudo -i virtualenv /opt/python/python' + python_version, shell=True)
+                python_version, python_version[0:3], os.environ['conf_pip_version']), shell=True, check=True)
+            subprocess.run('sudo mv /etc/pip.conf /etc/back_pip.conf', shell=True, check=True)
+            subprocess.run('sudo touch /etc/pip.conf', shell=True, check=True)
+            subprocess.run('sudo echo "[global]" >> /etc/pip.conf', shell=True, check=True)
+            subprocess.run('sudo echo "timeout = 600" >> /etc/pip.conf', shell=True, check=True)
+        subprocess.run('sudo -i virtualenv /opt/python/python' + python_version, shell=True, check=True)
         venv_command = '/bin/bash /opt/python/python' + python_version + '/bin/activate'
         pip_command = '/opt/python/python' + python_version + '/bin/pip' + python_version[:3]
         if region == 'cn-north-1':
             try:
                 subprocess.run(venv_command + ' && sudo -i ' + pip_command +
                       ' install -i https://{0}/simple --trusted-host {0} --timeout 60000 -U pip==9.0.3 '
-                      '--no-cache-dir'.format(pip_mirror), shell=True)
-                subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install pyzmq==17.0.0', shell=True)
+                      '--no-cache-dir'.format(pip_mirror), shell=True, check=True)
+                subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install pyzmq==17.0.0', shell=True, check=True)
                 subprocess.run(venv_command + ' && sudo -i ' + pip_command +
                       ' install -i https://{0}/simple --trusted-host {0} --timeout 60000 ipython ipykernel '
-                      '--no-cache-dir'.format(pip_mirror), shell=True)
-                subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install NumPy=={0}'.format(numpy_version), shell=True)
+                      '--no-cache-dir'.format(pip_mirror), shell=True, check=True)
+                subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install NumPy=={0}'.format(numpy_version), shell=True, check=True)
                 subprocess.run(venv_command + ' && sudo -i ' + pip_command +
                       ' install -i https://{0}/simple --trusted-host {0} --timeout 60000 boto boto3 SciPy '
-                      'Matplotlib==2.0.2 pandas Sympy Pillow sklearn --no-cache-dir'.format(pip_mirror), shell=True)
+                      'Matplotlib==2.0.2 pandas Sympy Pillow sklearn --no-cache-dir'.format(pip_mirror), shell=True, check=True)
                 # Need to refactor when we add GPU cluster
                 if application == 'deeplearning':
                     subprocess.run(venv_command + ' && sudo -i ' + pip_command +
                           ' install -i https://{0}/simple --trusted-host {0} --timeout 60000 mxnet-cu80 opencv-python '
-                          'keras Theano --no-cache-dir'.format(pip_mirror), shell=True)
+                          'keras Theano --no-cache-dir'.format(pip_mirror), shell=True, check=True)
                     python_without_dots = python_version.replace('.', '')
                     subprocess.run(venv_command + ' && sudo -i ' + pip_command +
                           ' install  https://cntk.ai/PythonWheel/GPU/cntk-2.0rc3-cp{0}-cp{0}m-linux_x86_64.whl '
-                          '--no-cache-dir'.format(python_without_dots[:2]), shell=True)
-                subprocess.run('sudo rm /etc/pip.conf', shell=True)
-                subprocess.run('sudo mv /etc/back_pip.conf /etc/pip.conf', shell=True)
+                          '--no-cache-dir'.format(python_without_dots[:2]), shell=True, check=True)
+                subprocess.run('sudo rm /etc/pip.conf', shell=True, check=True)
+                subprocess.run('sudo mv /etc/back_pip.conf /etc/pip.conf', shell=True, check=True)
             except:
-                subprocess.run('sudo rm /etc/pip.conf', shell=True)
-                subprocess.run('sudo mv /etc/back_pip.conf /etc/pip.conf', shell=True)
-                subprocess.run('sudo rm -rf /opt/python/python{}/'.format(python_version), shell=True)
+                subprocess.run('sudo rm /etc/pip.conf', shell=True, check=True)
+                subprocess.run('sudo mv /etc/back_pip.conf /etc/pip.conf', shell=True, check=True)
+                subprocess.run('sudo rm -rf /opt/python/python{}/'.format(python_version), shell=True, check=True)
                 sys.exit(1)
         else:
-            subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install -U pip==9.0.3', shell=True)
-            subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install pyzmq==17.0.0', shell=True)
-            subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install ipython ipykernel --no-cache-dir', shell=True)
-            subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install NumPy=={}'.format(numpy_version), shell=True)
+            subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install -U pip==9.0.3', shell=True, check=True)
+            subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install pyzmq==17.0.0', shell=True, check=True)
+            subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install ipython ipykernel --no-cache-dir', shell=True, check=True)
+            subprocess.run(venv_command + ' && sudo -i ' + pip_command + ' install NumPy=={}'.format(numpy_version), shell=True, check=True)
             subprocess.run(venv_command + ' && sudo -i ' + pip_command +
                   ' install boto boto3 SciPy Matplotlib==2.0.2 pandas Sympy Pillow sklearn '
-                  '--no-cache-dir', shell=True)
+                  '--no-cache-dir', shell=True, check=True)
             # Need to refactor when we add GPU cluster
             if application == 'deeplearning':
                 subprocess.run(venv_command + ' && sudo -i ' + pip_command +
-                      ' install mxnet-cu80 opencv-python keras Theano --no-cache-dir', shell=True)
+                      ' install mxnet-cu80 opencv-python keras Theano --no-cache-dir', shell=True, check=True)
                 python_without_dots = python_version.replace('.', '')
                 subprocess.run(venv_command + ' && sudo -i ' + pip_command +
                       ' install  https://cntk.ai/PythonWheel/GPU/cntk-2.0rc3-cp{0}-cp{0}m-linux_x86_64.whl '
-                      '--no-cache-dir'.format(python_without_dots[:2]), shell=True)
-        subprocess.run('sudo rm -rf /usr/bin/python{}-dp'.format(python_version[0:3]), shell=True)
+                      '--no-cache-dir'.format(python_without_dots[:2]), shell=True, check=True)
+        subprocess.run('sudo rm -rf /usr/bin/python{}-dp'.format(python_version[0:3]), shell=True, check=True)
         subprocess.run('sudo ln -fs /opt/python/python{0}/bin/python{1} /usr/bin/python{1}-dp'.format(python_version,
-                                                                                             python_version[0:3]), shell=True)
+                                                                                             python_version[0:3]), shell=True, check=True)
 
 
 def spark_defaults(args):
     spark_def_path = '/opt/' + args.emr_version + '/' + args.cluster_name + '/spark/conf/spark-defaults.conf'
     for i in eval(args.excluded_lines):
-        subprocess.run(""" sudo bash -c " sed -i '/""" + i + """/d' """ + spark_def_path + """ " """, shell=True)
-    subprocess.run(""" sudo bash -c " sed -i '/#/d' """ + spark_def_path + """ " """, shell=True)
-    subprocess.run(""" sudo bash -c " sed -i '/^\s*$/d' """ + spark_def_path + """ " """, shell=True)
+        subprocess.run(""" sudo bash -c " sed -i '/""" + i + """/d' """ + spark_def_path + """ " """, shell=True, check=True)
+    subprocess.run(""" sudo bash -c " sed -i '/#/d' """ + spark_def_path + """ " """, shell=True, check=True)
+    subprocess.run(""" sudo bash -c " sed -i '/^\s*$/d' """ + spark_def_path + """ " """, shell=True, check=True)
     subprocess.run(""" sudo bash -c "sed -i '/spark.driver.extraClassPath/,/spark.driver.extraLibraryPath/s|"""
-          """/usr|/opt/DATAENGINE-SERVICE_VERSION/jars/usr|g' """ + spark_def_path + """ " """, shell=True)
+          """/usr|/opt/DATAENGINE-SERVICE_VERSION/jars/usr|g' """ + spark_def_path + """ " """, shell=True, check=True)
     subprocess.run(
         """ sudo bash -c "sed -i '/spark.yarn.dist.files/s/\/etc\/spark\/conf/\/opt\/DATAENGINE-SERVICE_VERSION\/CLUSTER\/conf/g' """
-        + spark_def_path + """ " """, shell=True)
+        + spark_def_path + """ " """, shell=True, check=True)
     template_file = spark_def_path
     with open(template_file, 'r') as f:
         text = f.read()
@@ -1618,8 +1618,8 @@ def spark_defaults(args):
     else:
         endpoint_url = 'https://s3-' + args.region + '.amazonaws.com'
     subprocess.run("""bash -c 'echo "spark.hadoop.fs.s3a.endpoint    """ + endpoint_url + """ " >> """ +
-          spark_def_path + """'""", shell=True)
-    subprocess.run('echo "spark.hadoop.fs.s3a.server-side-encryption-algorithm   AES256" >> {}'.format(spark_def_path), shell=True)
+          spark_def_path + """'""", shell=True, check=True)
+    subprocess.run('echo "spark.hadoop.fs.s3a.server-side-encryption-algorithm   AES256" >> {}'.format(spark_def_path), shell=True, check=True)
 
 
 def ensure_local_jars(os_user, jars_dir):
@@ -1718,49 +1718,49 @@ def configure_zeppelin_emr_interpreter(emr_version, cluster_name, region, spark_
                      "/jars/usr/lib/hadoop-lzo/lib/hadoop-lzo-*.jar".format(emr_version)
         # fix due to: Multiple py4j files found under ..../spark/python/lib
         # py4j-0.10.7-src.zip is still in the folder; versions may vary.
-        subprocess.run('rm /opt/{0}/{1}/spark/python/lib/py4j-src.zip'.format(emr_version, cluster_name), shell=True)
+        subprocess.run('rm /opt/{0}/{1}/spark/python/lib/py4j-src.zip'.format(emr_version, cluster_name), shell=True, check=True)
 
-        subprocess.run('echo \"Configuring emr path for Zeppelin\"', shell=True)
+        subprocess.run('echo \"Configuring emr path for Zeppelin\"', shell=True, check=True)
         subprocess.run('sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/{0}\/{1}\/spark/\" '
-              '/opt/zeppelin/conf/zeppelin-env.sh'.format(emr_version, cluster_name), shell=True)
+              '/opt/zeppelin/conf/zeppelin-env.sh'.format(emr_version, cluster_name), shell=True, check=True)
         subprocess.run('sed -i "s/^export HADOOP_CONF_DIR.*/export HADOOP_CONF_DIR=' + \
-              '\/opt\/{0}\/{1}\/conf/" /opt/{0}/{1}/spark/conf/spark-env.sh'.format(emr_version, cluster_name), shell=True)
+              '\/opt\/{0}\/{1}\/conf/" /opt/{0}/{1}/spark/conf/spark-env.sh'.format(emr_version, cluster_name), shell=True, check=True)
         subprocess.run('echo \"spark.jars $(ls {0} | tr \'\\n\' \',\')\" >> /opt/{1}/{2}/spark/conf/spark-defaults.conf'
-              .format(spark_libs, emr_version, cluster_name), shell=True)
+              .format(spark_libs, emr_version, cluster_name), shell=True, check=True)
         subprocess.run('sed -i "/spark.executorEnv.PYTHONPATH/d" /opt/{0}/{1}/spark/conf/spark-defaults.conf'
-              .format(emr_version, cluster_name), shell=True)
+              .format(emr_version, cluster_name), shell=True, check=True)
         subprocess.run('sed -i "/spark.yarn.dist.files/d" /opt/{0}/{1}/spark/conf/spark-defaults.conf'
-              .format(emr_version, cluster_name), shell=True)
-        subprocess.run('sudo chown {0}:{0} -R /opt/zeppelin/'.format(os_user), shell=True)
-        subprocess.run('sudo systemctl daemon-reload', shell=True)
-        subprocess.run('sudo service zeppelin-notebook stop', shell=True)
-        subprocess.run('sudo service zeppelin-notebook start', shell=True)
+              .format(emr_version, cluster_name), shell=True, check=True)
+        subprocess.run('sudo chown {0}:{0} -R /opt/zeppelin/'.format(os_user), shell=True, check=True)
+        subprocess.run('sudo systemctl daemon-reload', shell=True, check=True)
+        subprocess.run('sudo service zeppelin-notebook stop', shell=True, check=True)
+        subprocess.run('sudo service zeppelin-notebook start', shell=True, check=True)
         while not zeppelin_restarted:
-            subprocess.run('sleep 5', shell=True)
-            result = subprocess.run('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True)
+            subprocess.run('sleep 5', shell=True, check=True)
+            result = subprocess.run('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True, text=True, check=True).stdout
             result = result[:1]
             if result == '1':
                 zeppelin_restarted = True
-        subprocess.run('sleep 5', shell=True)
-        subprocess.run('echo \"Configuring emr spark interpreter for Zeppelin\"', shell=True)
+        subprocess.run('sleep 5', shell=True, check=True)
+        subprocess.run('echo \"Configuring emr spark interpreter for Zeppelin\"', shell=True, check=True)
         if multiple_emrs == 'true':
             while not port_number_found:
                 port_free = subprocess.run('sudo bash -c "nmap -p ' + str(default_port) +
-                                  ' localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True)
+                                  ' localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True, text=True, check=True).stdout
                 port_free = port_free[:1]
                 if port_free == '0':
                     livy_port = default_port
                     port_number_found = True
                 else:
                     default_port += 1
-            subprocess.run('sudo echo "livy.server.port = {0}" >> {1}conf/livy.conf'.format(str(livy_port), livy_path), shell=True)
-            subprocess.run('sudo echo "livy.spark.master = yarn" >> {}conf/livy.conf'.format(livy_path), shell=True)
+            subprocess.run('sudo echo "livy.server.port = {0}" >> {1}conf/livy.conf'.format(str(livy_port), livy_path), shell=True, check=True)
+            subprocess.run('sudo echo "livy.spark.master = yarn" >> {}conf/livy.conf'.format(livy_path), shell=True, check=True)
             if os.path.exists('{}conf/spark-blacklist.conf'.format(livy_path)):
-                subprocess.run('sudo sed -i "s/^/#/g" {}conf/spark-blacklist.conf'.format(livy_path), shell=True)
-            subprocess.run(''' sudo echo "export SPARK_HOME={0}" >> {1}conf/livy-env.sh'''.format(spark_dir, livy_path), shell=True)
-            subprocess.run(''' sudo echo "export HADOOP_CONF_DIR={0}" >> {1}conf/livy-env.sh'''.format(yarn_dir, livy_path), shell=True)
+                subprocess.run('sudo sed -i "s/^/#/g" {}conf/spark-blacklist.conf'.format(livy_path), shell=True, check=True)
+            subprocess.run(''' sudo echo "export SPARK_HOME={0}" >> {1}conf/livy-env.sh'''.format(spark_dir, livy_path), shell=True, check=True)
+            subprocess.run(''' sudo echo "export HADOOP_CONF_DIR={0}" >> {1}conf/livy-env.sh'''.format(yarn_dir, livy_path), shell=True, check=True)
             subprocess.run(''' sudo echo "export PYSPARK3_PYTHON=python{0}" >> {1}conf/livy-env.sh'''.format(python_version[0:3],
-                                                                                                    livy_path), shell=True)
+                                                                                                    livy_path), shell=True, check=True)
             template_file = "/tmp/dataengine-service_interpreter.json"
             fr = open(template_file, 'r+')
             text = fr.read()
@@ -1774,20 +1774,20 @@ def configure_zeppelin_emr_interpreter(emr_version, cluster_name, region, spark_
             for _ in range(5):
                 try:
                     subprocess.run("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d " +
-                          "@/tmp/dataengine-service_interpreter.json http://localhost:8080/api/interpreter/setting", shell=True)
+                          "@/tmp/dataengine-service_interpreter.json http://localhost:8080/api/interpreter/setting", shell=True, check=True)
                     break
                 except:
-                    subprocess.run('sleep 5', shell=True)
+                    subprocess.run('sleep 5', shell=True, check=True)
             subprocess.run('sudo cp /opt/livy-server-cluster.service /etc/systemd/system/livy-server-{}.service'
-                  .format(str(livy_port)), shell=True)
+                  .format(str(livy_port)), shell=True, check=True)
             subprocess.run("sudo sed -i 's|OS_USER|{0}|' /etc/systemd/system/livy-server-{1}.service"
-                  .format(os_user, str(livy_port)), shell=True)
+                  .format(os_user, str(livy_port)), shell=True, check=True)
             subprocess.run("sudo sed -i 's|LIVY_PATH|{0}|' /etc/systemd/system/livy-server-{1}.service"
-                  .format(livy_path, str(livy_port)), shell=True)
-            subprocess.run('sudo chmod 644 /etc/systemd/system/livy-server-{}.service'.format(str(livy_port)), shell=True)
-            subprocess.run("sudo systemctl daemon-reload", shell=True)
-            subprocess.run("sudo systemctl enable livy-server-{}".format(str(livy_port)), shell=True)
-            subprocess.run('sudo systemctl start livy-server-{}'.format(str(livy_port)), shell=True)
+                  .format(livy_path, str(livy_port)), shell=True, check=True)
+            subprocess.run('sudo chmod 644 /etc/systemd/system/livy-server-{}.service'.format(str(livy_port)), shell=True, check=True)
+            subprocess.run("sudo systemctl daemon-reload", shell=True, check=True)
+            subprocess.run("sudo systemctl enable livy-server-{}".format(str(livy_port)), shell=True, check=True)
+            subprocess.run('sudo systemctl start livy-server-{}'.format(str(livy_port)), shell=True, check=True)
         else:
             template_file = "/tmp/dataengine-service_interpreter.json"
             p_versions = ["2", "{}-dp".format(python_version[:3])]
@@ -1808,19 +1808,19 @@ def configure_zeppelin_emr_interpreter(emr_version, cluster_name, region, spark_
                     try:
                         subprocess.run("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d " +
                               "@/tmp/emr_spark_py" + p_version +
-                              "_interpreter.json http://localhost:8080/api/interpreter/setting", shell=True)
+                              "_interpreter.json http://localhost:8080/api/interpreter/setting", shell=True, check=True)
                         break
                     except:
-                        subprocess.run('sleep 5', shell=True)
-        subprocess.run('touch /home/' + os_user + '/.ensure_dir/dataengine-service_' + cluster_name + '_interpreter_ensured', shell=True)
+                        subprocess.run('sleep 5', shell=True, check=True)
+        subprocess.run('touch /home/' + os_user + '/.ensure_dir/dataengine-service_' + cluster_name + '_interpreter_ensured', shell=True, check=True)
     except:
         sys.exit(1)
 
 
 def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_enabled, spark_configs=''):
     subprocess.run("jar_list=`find {0} -name '*.jar' | tr '\\n' ',' | sed 's/,$//'` ; echo \"spark.jars $jar_list\" >> \
-          /tmp/{1}/notebook_spark-defaults_local.conf".format(jars_dir, cluster_name), shell=True)
-    region = subprocess.run('curl http://169.254.169.254/latest/meta-data/placement/availability-zone', capture_output=True, shell=True)[:-1]
+          /tmp/{1}/notebook_spark-defaults_local.conf".format(jars_dir, cluster_name), shell=True, check=True)
+    region = subprocess.run('curl http://169.254.169.254/latest/meta-data/placement/availability-zone', capture_output=True, shell=True, text=True, check=True).stdout[:-1]
     if region == 'us-east-1':
         endpoint_url = 'https://s3.amazonaws.com'
     elif region == 'cn-north-1':
@@ -1828,25 +1828,25 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
     else:
         endpoint_url = 'https://s3-' + region + '.amazonaws.com'
     subprocess.run("""bash -c 'echo "spark.hadoop.fs.s3a.endpoint    """ + endpoint_url +
-          """" >> /tmp/{}/notebook_spark-defaults_local.conf'""".format(cluster_name), shell=True)
+          """" >> /tmp/{}/notebook_spark-defaults_local.conf'""".format(cluster_name), shell=True, check=True)
     subprocess.run('echo "spark.hadoop.fs.s3a.server-side-encryption-algorithm   AES256" >> '
-          '/tmp/{}/notebook_spark-defaults_local.conf'.format(cluster_name), shell=True)
+          '/tmp/{}/notebook_spark-defaults_local.conf'.format(cluster_name), shell=True, check=True)
     if os.path.exists('{0}spark/conf/spark-defaults.conf'.format(cluster_dir)):
         additional_spark_properties = subprocess.run('diff --changed-group-format="%>" --unchanged-group-format="" '
                                             '/tmp/{0}/notebook_spark-defaults_local.conf '
                                             '{1}spark/conf/spark-defaults.conf | grep -v "^#"'.format(
-            cluster_name, cluster_dir), capture_output=True, shell=True)
+            cluster_name, cluster_dir), capture_output=True, shell=True, text=True).stdout
         for property in additional_spark_properties.split('\n'):
-            subprocess.run('echo "{0}" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(property, cluster_name), shell=True)
+            subprocess.run('echo "{0}" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(property, cluster_name), shell=True, check=True)
     if os.path.exists('{0}'.format(cluster_dir)):
         subprocess.run('cp -f /tmp/{0}/notebook_spark-defaults_local.conf  {1}spark/conf/spark-defaults.conf'.format(cluster_name,
-                                                                                                        cluster_dir), shell=True)
+                                                                                                        cluster_dir), shell=True, check=True)
     if spark_configs and os.path.exists('{0}'.format(cluster_dir)):
         datalab_header = subprocess.run('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
-                               capture_output=True, shell=True)
+                               capture_output=True, shell=True, text=True).stdout
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
-        spark_defaults = subprocess.run('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture_output=True, shell=True)
+        spark_defaults = subprocess.run('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture_output=True, shell=True, text=True, check=True).stdout
         current_spark_properties = spark_defaults.split('\n')
         for param in current_spark_properties:
             if param.split(' ')[0] != '#':
@@ -1859,11 +1859,11 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
                                 new_spark_defaults.append(property + ' ' + config['Properties'][property])
                 new_spark_defaults.append(param)
         new_spark_defaults = set(new_spark_defaults)
-        subprocess.run("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_dir), shell=True)
+        subprocess.run("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_dir), shell=True, check=True)
         for prop in new_spark_defaults:
             prop = prop.rstrip()
-            subprocess.run('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir), shell=True)
-        subprocess.run('sed -i "/^\s*$/d" {0}/spark/conf/spark-defaults.conf'.format(cluster_dir), shell=True)
+            subprocess.run('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir), shell=True, check=True)
+        subprocess.run('sed -i "/^\s*$/d" {0}/spark/conf/spark-defaults.conf'.format(cluster_dir), shell=True, check=True)
 
 
 def remove_dataengine_kernels(tag_name, notebook_name, os_user, key_path, cluster_name):
@@ -1956,12 +1956,12 @@ def ensure_local_spark(os_user, spark_link, spark_version, hadoop_version, local
 def install_dataengine_spark(cluster_name, spark_link, spark_version, hadoop_version, cluster_dir, os_user,
                              datalake_enabled):
     subprocess.run('wget ' + spark_link + ' -O /tmp/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' +
-          hadoop_version + '.tgz', shell=True)
+          hadoop_version + '.tgz', shell=True, check=True)
     subprocess.run('tar -zxvf /tmp/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' + hadoop_version +
-          '.tgz -C /opt/' + cluster_name, shell=True)
+          '.tgz -C /opt/' + cluster_name, shell=True, check=True)
     subprocess.run('mv /opt/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' +
-          cluster_dir + 'spark/', shell=True)
-    subprocess.run('chown -R ' + os_user + ':' + os_user + ' ' + cluster_dir + 'spark/', shell=True)
+          cluster_dir + 'spark/', shell=True, check=True)
+    subprocess.run('chown -R ' + os_user + ':' + os_user + ' ' + cluster_dir + 'spark/', shell=True, check=True)
 
 
 def find_des_jars(all_jars, des_path):
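
One caveat with the blanket check=True in this file: commands such as md5sum -c and diff exit non-zero precisely in the cases the callers branch on, and subprocess.run() returns a CompletedProcess rather than the captured text. The hunks above therefore read .stdout with text=True and omit check=True where a non-zero exit is expected. A minimal sketch of that capture pattern (path is illustrative):

    import subprocess

    # md5sum -c exits 1 on a mismatch and prints its WARNING to stderr, so
    # check=True would raise before the branch ever ran; inspect the
    # completed process instead.
    result = subprocess.run('md5sum -c /tmp/spark-checksum.chk',
                            capture_output=True, shell=True, text=True)
    if result.returncode != 0 or 'WARNING' in result.stdout + result.stderr:
        print('Checksum mismatch, re-downloading archive')
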
diff --git a/infrastructure-provisioning/src/general/lib/aws/meta_lib.py b/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
index 27ec19f..b2300e0 100644
--- a/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
@@ -679,7 +679,7 @@ def check_security_group(security_group_name, count=0):
 def emr_waiter(tag_name, tag_value):
     if len(get_emr_list(tag_value, 'Value', False, True)) > 0 or os.path.exists('/response/.emr_creating_' + os.environ['exploratory_name']) or get_not_configured_emr(tag_name, tag_value):
         with hide('stderr', 'running', 'warnings'):
-            subprocess.run("echo 'Some EMR cluster is still being created/terminated, waiting..'", shell=True)
+            subprocess.run("echo 'Some EMR cluster is still being created/terminated, waiting..'", shell=True, check=True)
         time.sleep(60)
         emr_waiter(tag_name, tag_value)
     else:
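
A side note on emr_waiter(): it re-invokes itself every 60 seconds, so a very long EMR operation grows the Python call stack without bound. A hedged sketch of an equivalent iterative waiter (the predicate is a stand-in for the EMR checks above):

    import time

    def emr_waiter(still_busy):
        # Poll in a loop instead of recursing; each pass waits 60s like the
        # original.
        while still_busy():
            print('Some EMR cluster is still being created/terminated, waiting..')
            time.sleep(60)
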
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index 3ec57ce..f0d6c67 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -1180,27 +1180,27 @@ def configure_local_spark(jars_dir, templates_dir, memory_type='driver'):
 
 def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_enabled, spark_configs=''):
     subprocess.run("jar_list=`find {0} -name '*.jar' | tr '\\n' ',' | sed 's/,$//'` ; echo \"spark.jars $jar_list\" >> \
-          /tmp/{1}/notebook_spark-defaults_local.conf".format(jars_dir, cluster_name), shell=True)
+          /tmp/{1}/notebook_spark-defaults_local.conf".format(jars_dir, cluster_name), shell=True, check=True)
     if os.path.exists('{0}spark/conf/spark-defaults.conf'.format(cluster_dir)):
         additional_spark_properties = subprocess.run('diff --changed-group-format="%>" --unchanged-group-format="" '
                                             '/tmp/{0}/notebook_spark-defaults_local.conf '
                                             '{1}spark/conf/spark-defaults.conf | grep -v "^#"'.format(
-                                             cluster_name, cluster_dir), capture_output=True, shell=True)
+                                             cluster_name, cluster_dir), capture_output=True, shell=True, text=True).stdout
         for property in additional_spark_properties.split('\n'):
-            subprocess.run('echo "{0}" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(property, cluster_name), shell=True)
+            subprocess.run('echo "{0}" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(property, cluster_name), shell=True, check=True)
     if os.path.exists('{0}'.format(cluster_dir)):
         subprocess.run('cp -f /tmp/{0}/notebook_spark-defaults_local.conf  {1}spark/conf/spark-defaults.conf'.format(cluster_name,
-                                                                                                        cluster_dir), shell=True)
+                                                                                                        cluster_dir), shell=True, check=True)
     if datalake_enabled == 'false':
-        subprocess.run('cp -f /opt/spark/conf/core-site.xml {}spark/conf/'.format(cluster_dir), shell=True)
+        subprocess.run('cp -f /opt/spark/conf/core-site.xml {}spark/conf/'.format(cluster_dir), shell=True, check=True)
     else:
-        subprocess.run('cp -f /opt/hadoop/etc/hadoop/core-site.xml {}hadoop/etc/hadoop/core-site.xml'.format(cluster_dir), shell=True)
+        subprocess.run('cp -f /opt/hadoop/etc/hadoop/core-site.xml {}hadoop/etc/hadoop/core-site.xml'.format(cluster_dir), shell=True, check=True)
     if spark_configs and os.path.exists('{0}'.format(cluster_dir)):
         datalab_header = subprocess.run('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
-                               capture_output=True, shell=True)
+                               capture_output=True, shell=True, text=True).stdout
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
-        spark_defaults = subprocess.run('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture_output=True, shell=True)
+        spark_defaults = subprocess.run('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture_output=True, shell=True, text=True, check=True).stdout
         current_spark_properties = spark_defaults.split('\n')
         for param in current_spark_properties:
             if param.split(' ')[0] != '#':
@@ -1213,11 +1213,11 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
                                 new_spark_defaults.append(property + ' ' + config['Properties'][property])
                 new_spark_defaults.append(param)
         new_spark_defaults = set(new_spark_defaults)
-        subprocess.run("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_dir), shell=True)
+        subprocess.run("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_dir), shell=True, check=True)
         for prop in new_spark_defaults:
             prop = prop.rstrip()
-            subprocess.run('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir), shell=True)
-        subprocess.run('sed -i "/^\s*$/d" {0}/spark/conf/spark-defaults.conf'.format(cluster_dir), shell=True)
+            subprocess.run('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir), shell=True, check=True)
+        subprocess.run('sed -i "/^\s*$/d" {0}/spark/conf/spark-defaults.conf'.format(cluster_dir), shell=True, check=True)
 
 
 def remount_azure_disk(creds=False, os_user='', hostname='', keyfile=''):
@@ -1318,33 +1318,33 @@ def ensure_local_spark(os_user, spark_link, spark_version, hadoop_version, local
 def install_dataengine_spark(cluster_name, spark_link, spark_version, hadoop_version, cluster_dir, os_user, datalake_enabled):
     try:
         if datalake_enabled == 'false':
-            subprocess.run('wget ' + spark_link + ' -O /tmp/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz', shell=True)
-            subprocess.run('tar -zxvf /tmp/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz -C /opt/', shell=True)
-            subprocess.run('mv /opt/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' + cluster_dir + 'spark/', shell=True)
-            subprocess.run('chown -R ' + os_user + ':' + os_user + ' ' + cluster_dir + 'spark/', shell=True)
+            subprocess.run('wget ' + spark_link + ' -O /tmp/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz', shell=True, check=True)
+            subprocess.run('tar -zxvf /tmp/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz -C /opt/', shell=True, check=True)
+            subprocess.run('mv /opt/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' + cluster_dir + 'spark/', shell=True, check=True)
+            subprocess.run('chown -R ' + os_user + ':' + os_user + ' ' + cluster_dir + 'spark/', shell=True, check=True)
         else:
             # Downloading Spark without Hadoop
             subprocess.run('wget https://archive.apache.org/dist/spark/spark-{0}/spark-{0}-bin-without-hadoop.tgz -O /tmp/{1}/spark-{0}-bin-without-hadoop.tgz'
-                 .format(spark_version, cluster_name), shell=True)
-            subprocess.run('tar -zxvf /tmp/' + cluster_name + '/spark-{}-bin-without-hadoop.tgz -C /opt/'.format(spark_version), shell=True)
-            subprocess.run('mv /opt/spark-{}-bin-without-hadoop {}spark/'.format(spark_version, cluster_dir), shell=True)
-            subprocess.run('chown -R {0}:{0} {1}/spark/'.format(os_user, cluster_dir), shell=True)
+                 .format(spark_version, cluster_name), shell=True, check=True)
+            subprocess.run('tar -zxvf /tmp/' + cluster_name + '/spark-{}-bin-without-hadoop.tgz -C /opt/'.format(spark_version), shell=True, check=True)
+            subprocess.run('mv /opt/spark-{}-bin-without-hadoop {}spark/'.format(spark_version, cluster_dir), shell=True, check=True)
+            subprocess.run('chown -R {0}:{0} {1}/spark/'.format(os_user, cluster_dir), shell=True, check=True)
             # Downloading Hadoop
             hadoop_version = '3.0.0'
             subprocess.run('wget https://archive.apache.org/dist/hadoop/common/hadoop-{0}/hadoop-{0}.tar.gz -O /tmp/{1}/hadoop-{0}.tar.gz'
-                 .format(hadoop_version, cluster_name), shell=True)
-            subprocess.run('tar -zxvf /tmp/' + cluster_name + '/hadoop-{0}.tar.gz -C /opt/'.format(hadoop_version), shell=True)
-            subprocess.run('mv /opt/hadoop-{0} {1}hadoop/'.format(hadoop_version, cluster_dir), shell=True)
-            subprocess.run('chown -R {0}:{0} {1}hadoop/'.format(os_user, cluster_dir), shell=True)
+                 .format(hadoop_version, cluster_name), shell=True, check=True)
+            subprocess.run('tar -zxvf /tmp/' + cluster_name + '/hadoop-{0}.tar.gz -C /opt/'.format(hadoop_version), shell=True, check=True)
+            subprocess.run('mv /opt/hadoop-{0} {1}hadoop/'.format(hadoop_version, cluster_dir), shell=True, check=True)
+            subprocess.run('chown -R {0}:{0} {1}hadoop/'.format(os_user, cluster_dir), shell=True, check=True)
             # Configuring Hadoop and Spark
             java_path = datalab.common_lib.find_java_path_local()
-            subprocess.run('echo "export JAVA_HOME={}" >> {}hadoop/etc/hadoop/hadoop-env.sh'.format(java_path, cluster_dir), shell=True)
-            subprocess.run("""echo 'export HADOOP_CLASSPATH="$HADOOP_HOME/share/hadoop/tools/lib/*"' >> {}hadoop/etc/hadoop/hadoop-env.sh""".format(cluster_dir), shell=True)
-            subprocess.run('echo "export HADOOP_HOME={0}hadoop/" >> {0}spark/conf/spark-env.sh'.format(cluster_dir), shell=True)
-            subprocess.run('echo "export SPARK_HOME={0}spark/" >> {0}spark/conf/spark-env.sh'.format(cluster_dir), shell=True)
-            spark_dist_classpath = subprocess.run('{}hadoop/bin/hadoop classpath'.format(cluster_dir), capture_output=True, shell=True)
+            subprocess.run('echo "export JAVA_HOME={}" >> {}hadoop/etc/hadoop/hadoop-env.sh'.format(java_path, cluster_dir), shell=True, check=True)
+            subprocess.run("""echo 'export HADOOP_CLASSPATH="$HADOOP_HOME/share/hadoop/tools/lib/*"' >> {}hadoop/etc/hadoop/hadoop-env.sh""".format(cluster_dir), shell=True, check=True)
+            subprocess.run('echo "export HADOOP_HOME={0}hadoop/" >> {0}spark/conf/spark-env.sh'.format(cluster_dir), shell=True, check=True)
+            subprocess.run('echo "export SPARK_HOME={0}spark/" >> {0}spark/conf/spark-env.sh'.format(cluster_dir), shell=True, check=True)
+            spark_dist_classpath = subprocess.run('{}hadoop/bin/hadoop classpath'.format(cluster_dir), capture_output=True, shell=True, text=True, check=True).stdout.strip()
             subprocess.run('echo "export SPARK_DIST_CLASSPATH={}" >> {}spark/conf/spark-env.sh'.format(
-                spark_dist_classpath, cluster_dir), shell=True)
+                spark_dist_classpath, cluster_dir), shell=True, check=True)
     except:
         sys.exit(1)
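
Note that the spark_dist_classpath line reads .stdout with text=True:
capture_output=True alone returns a CompletedProcess whose stdout is bytes,
which would otherwise be interpolated as its repr into the echo. The
general pattern (a sketch, assuming Python 3.7+ for capture_output; the
command is shortened for illustration):

    import subprocess

    result = subprocess.run('hadoop classpath', capture_output=True,
                            shell=True, text=True, check=True)
    classpath = result.stdout.strip()  # a str, not a CompletedProcess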
 
diff --git a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
index f65cad4..8fb5fd0 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
@@ -1095,49 +1095,49 @@ class GCPActions:
         print("Downloading jars...")
         GCPActions().get_from_bucket(args.bucket, 'jars/{0}/jars.tar.gz'.format(args.dataproc_version), '/tmp/jars.tar.gz')
         GCPActions().get_from_bucket(args.bucket, 'jars/{0}/jars-checksum.chk'.format(args.dataproc_version), '/tmp/jars-checksum.chk')
-        if 'WARNING' in subprocess.run('md5sum -c /tmp/jars-checksum.chk', capture_output=True, shell=True):
-            subprocess.run('rm -f /tmp/jars.tar.gz', shell=True)
+        if 'WARNING' in subprocess.run('md5sum -c /tmp/jars-checksum.chk', capture_output=True, shell=True, text=True).stdout:
+            subprocess.run('rm -f /tmp/jars.tar.gz', shell=True, check=True)
             GCPActions().get_from_bucket(args.bucket, 'jars/{0}/jars.tar.gz'.format(args.cluster_name), '/tmp/jars.tar.gz')
-            if 'WARNING' in subprocess.run('md5sum -c /tmp/jars-checksum.chk', capture_output=True, shell=True):
+            if 'WARNING' in subprocess.run('md5sum -c /tmp/jars-checksum.chk', capture_output=True, shell=True, text=True).stdout:
                 print("The checksum of jars.tar.gz is mismatched. It could be caused by a GCP network issue.")
                 sys.exit(1)
-        subprocess.run('tar -zhxvf /tmp/jars.tar.gz -C {}'.format(dataproc_dir), shell=True)
+        subprocess.run('tar -zhxvf /tmp/jars.tar.gz -C {}'.format(dataproc_dir), shell=True, check=True)
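
The checksum blocks read the md5sum output as text and deliberately leave
check=True off: md5sum -c exits nonzero on a mismatch, and a raise there
would preempt the retry branch. The flow, factored as a sketch
(checksum_ok is a hypothetical helper name):

    import subprocess

    def checksum_ok(chk_file):
        # md5sum -c exits 1 on mismatch, so no check=True here:
        # the caller inspects the output instead
        res = subprocess.run('md5sum -c {}'.format(chk_file),
                             capture_output=True, shell=True, text=True)
        return 'WARNING' not in res.stdout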
 
     def yarn(self, args, yarn_dir):
         print("Downloading yarn configuration...")
         bucket = self.storage_client.get_bucket(args.bucket)
         list_files = bucket.list_blobs(prefix='{0}/{1}/config/'.format(args.user_name, args.cluster_name))
-        subprocess.run('mkdir -p /tmp/{0}/{1}/config/'.format(args.user_name, args.cluster_name), shell=True)
+        subprocess.run('mkdir -p /tmp/{0}/{1}/config/'.format(args.user_name, args.cluster_name), shell=True, check=True)
         for item in list_files:
             local_file = '/tmp/{0}/{1}/config/{2}'.format(args.user_name, args.cluster_name, item.name.split("/")[-1:][0])
             GCPActions().get_from_bucket(args.bucket, item.name, local_file)
-        subprocess.run('sudo mv /tmp/{0}/{1}/config/* {2}'.format(args.user_name, args.cluster_name, yarn_dir), shell=True)
-        subprocess.run('sudo rm -rf /tmp/{}'.format(args.user_name), shell=True)
+        subprocess.run('sudo mv /tmp/{0}/{1}/config/* {2}'.format(args.user_name, args.cluster_name, yarn_dir), shell=True, check=True)
+        subprocess.run('sudo rm -rf /tmp/{}'.format(args.user_name), shell=True, check=True)
 
     def install_dataproc_spark(self, args):
         print("Installing spark...")
         GCPActions().get_from_bucket(args.bucket, '{0}/{1}/spark.tar.gz'.format(args.user_name, args.cluster_name), '/tmp/spark.tar.gz')
         GCPActions().get_from_bucket(args.bucket, '{0}/{1}/spark-checksum.chk'.format(args.user_name, args.cluster_name), '/tmp/spark-checksum.chk')
-        if 'WARNING' in subprocess.run('md5sum -c /tmp/spark-checksum.chk', capture_output=True, shell=True):
-            subprocess.run('rm -f /tmp/spark.tar.gz', shell=True)
+        if 'WARNING' in subprocess.run('md5sum -c /tmp/spark-checksum.chk', capture_output=True, shell=True, text=True).stdout:
+            subprocess.run('rm -f /tmp/spark.tar.gz', shell=True, check=True)
             GCPActions().get_from_bucket(args.bucket, '{0}/{1}/spark.tar.gz'.format(args.user_name, args.cluster_name), '/tmp/spark.tar.gz')
-            if 'WARNING' in subprocess.run('md5sum -c /tmp/spark-checksum.chk', capture_output=True, shell=True):
+            if 'WARNING' in subprocess.run('md5sum -c /tmp/spark-checksum.chk', capture_output=True, shell=True, text=True).stdout:
                 print("The checksum of spark.tar.gz is mismatched. It could be caused by a GCP network issue.")
                 sys.exit(1)
-        subprocess.run('sudo tar -zhxvf /tmp/spark.tar.gz -C /opt/{0}/{1}/'.format(args.dataproc_version, args.cluster_name), shell=True)
+        subprocess.run('sudo tar -zhxvf /tmp/spark.tar.gz -C /opt/{0}/{1}/'.format(args.dataproc_version, args.cluster_name), shell=True, check=True)
 
     def spark_defaults(self, args):
         spark_def_path = '/opt/{0}/{1}/spark/conf/spark-env.sh'.format(args.dataproc_version, args.cluster_name)
-        subprocess.run(""" sudo bash -c " sed -i '/#/d' {}" """.format(spark_def_path), shell=True)
-        subprocess.run(""" sudo bash -c " sed -i '/^\s*$/d' {}" """.format(spark_def_path), shell=True)
-        subprocess.run(""" sudo bash -c " sed -i 's|/usr/lib/hadoop|/opt/{0}/jars/usr/lib/hadoop|g' {1}" """.format(args.dataproc_version, spark_def_path), shell=True)
-        subprocess.run(""" sudo bash -c " sed -i 's|/etc/hadoop/conf|/opt/{0}/{1}/conf|g' {2}" """.format(args.dataproc_version, args.cluster_name, spark_def_path), shell=True)
-        subprocess.run(""" sudo bash -c " sed -i '/\$HADOOP_HOME\/\*/a SPARK_DIST_CLASSPATH=\\"\$SPARK_DIST_CLASSPATH:\$HADOOP_HOME\/client\/*\\"' {}" """.format(spark_def_path), shell=True)
-        subprocess.run(""" sudo bash -c " sed -i '/\$HADOOP_YARN_HOME\/\*/a SPARK_DIST_CLASSPATH=\\"\$SPARK_DIST_CLASSPATH:\/opt\/jars\/\*\\"' {}" """.format(spark_def_path), shell=True)
-        subprocess.run(""" sudo bash -c " sed -i 's|/hadoop/spark/work|/tmp/hadoop/spark/work|g' {}" """.format(spark_def_path), shell=True)
-        subprocess.run(""" sudo bash -c " sed -i 's|/hadoop/spark/tmp|/tmp/hadoop/spark/tmp|g' {}" """.format(spark_def_path), shell=True)
-        subprocess.run(""" sudo bash -c " sed -i 's/STANDALONE_SPARK_MASTER_HOST.*/STANDALONE_SPARK_MASTER_HOST={0}-m/g' {1}" """.format(args.cluster_name, spark_def_path), shell=True)
-        subprocess.run(""" sudo bash -c " sed -i 's|/hadoop_gcs_connector_metadata_cache|/tmp/hadoop_gcs_connector_metadata_cache|g' /opt/{0}/{1}/conf/core-site.xml" """.format(args.dataproc_version, args.cluster_name), shell=True)
+        subprocess.run(""" sudo bash -c " sed -i '/#/d' {}" """.format(spark_def_path), shell=True, check=True)
+        subprocess.run(""" sudo bash -c " sed -i '/^\s*$/d' {}" """.format(spark_def_path), shell=True, check=True)
+        subprocess.run(""" sudo bash -c " sed -i 's|/usr/lib/hadoop|/opt/{0}/jars/usr/lib/hadoop|g' {1}" """.format(args.dataproc_version, spark_def_path), shell=True, check=True)
+        subprocess.run(""" sudo bash -c " sed -i 's|/etc/hadoop/conf|/opt/{0}/{1}/conf|g' {2}" """.format(args.dataproc_version, args.cluster_name, spark_def_path), shell=True, check=True)
+        subprocess.run(""" sudo bash -c " sed -i '/\$HADOOP_HOME\/\*/a SPARK_DIST_CLASSPATH=\\"\$SPARK_DIST_CLASSPATH:\$HADOOP_HOME\/client\/*\\"' {}" """.format(spark_def_path), shell=True, check=True)
+        subprocess.run(""" sudo bash -c " sed -i '/\$HADOOP_YARN_HOME\/\*/a SPARK_DIST_CLASSPATH=\\"\$SPARK_DIST_CLASSPATH:\/opt\/jars\/\*\\"' {}" """.format(spark_def_path), shell=True, check=True)
+        subprocess.run(""" sudo bash -c " sed -i 's|/hadoop/spark/work|/tmp/hadoop/spark/work|g' {}" """.format(spark_def_path), shell=True, check=True)
+        subprocess.run(""" sudo bash -c " sed -i 's|/hadoop/spark/tmp|/tmp/hadoop/spark/tmp|g' {}" """.format(spark_def_path), shell=True, check=True)
+        subprocess.run(""" sudo bash -c " sed -i 's/STANDALONE_SPARK_MASTER_HOST.*/STANDALONE_SPARK_MASTER_HOST={0}-m/g' {1}" """.format(args.cluster_name, spark_def_path), shell=True, check=True)
+        subprocess.run(""" sudo bash -c " sed -i 's|/hadoop_gcs_connector_metadata_cache|/tmp/hadoop_gcs_connector_metadata_cache|g' /opt/{0}/{1}/conf/core-site.xml" """.format(args.dataproc_version, args.cluster_name), shell=True, check=True)
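
spark_defaults() repeats the same sudo bash -c " sed -i ... " wrapper ten
times; a small helper would keep the quoting and the shell=True/check=True
handling in one place. A sketch (sudo_sed is a hypothetical name; the
expression and spark_def_path are from the code above):

    import subprocess

    def sudo_sed(expression, path):
        # apply one in-place sed edit as root, failing loudly on a nonzero exit
        subprocess.run(""" sudo bash -c " sed -i '{0}' {1}" """.format(expression, path),
                       shell=True, check=True)

    sudo_sed('/#/d', spark_def_path)  # drop comment lines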
 
     def remove_kernels(self, notebook_name, dataproc_name, dataproc_version, ssh_user, key_path, computational_name):
         try:
@@ -1211,40 +1211,40 @@ class GCPActions:
             python_version = python_version[0:5]
             livy_port = ''
             livy_path = '/opt/{0}/{1}/livy/'.format(dataproc_version, cluster_name)
-            subprocess.run('echo \"Configuring dataproc path for Zeppelin\"', shell=True)
+            subprocess.run('echo \"Configuring dataproc path for Zeppelin\"', shell=True, check=True)
             subprocess.run('sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/{0}\/{1}\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh'
-                  .format(dataproc_version, cluster_name), shell=True)
+                  .format(dataproc_version, cluster_name), shell=True, check=True)
             subprocess.run('sed -i \"s/^export HADOOP_CONF_DIR.*/export HADOOP_CONF_DIR=\/opt\/{0}\/{1}\/conf/\" /opt/{0}/{1}/spark/conf/spark-env.sh'
-                  .format(dataproc_version, cluster_name), shell=True)
-            subprocess.run('sed -i "/spark.executorEnv.PYTHONPATH/d" /opt/{0}/{1}/spark/conf/spark-defaults.conf'.format(dataproc_version, cluster_name), shell=True)
-            subprocess.run('sed -i "/spark.yarn.dist.files/d" /opt/{0}/{1}/spark/conf/spark-defaults.conf'.format(dataproc_version, cluster_name), shell=True)
-            subprocess.run('sudo chown {0}:{0} -R /opt/zeppelin/'.format(os_user), shell=True)
-            subprocess.run('sudo systemctl restart zeppelin-notebook.service', shell=True)
+                  .format(dataproc_version, cluster_name), shell=True, check=True)
+            subprocess.run('sed -i "/spark.executorEnv.PYTHONPATH/d" /opt/{0}/{1}/spark/conf/spark-defaults.conf'.format(dataproc_version, cluster_name), shell=True, check=True)
+            subprocess.run('sed -i "/spark.yarn.dist.files/d" /opt/{0}/{1}/spark/conf/spark-defaults.conf'.format(dataproc_version, cluster_name), shell=True, check=True)
+            subprocess.run('sudo chown {0}:{0} -R /opt/zeppelin/'.format(os_user), shell=True, check=True)
+            subprocess.run('sudo systemctl restart zeppelin-notebook.service', shell=True, check=True)
             while not zeppelin_restarted:
-                subprocess.run('sleep 5', shell=True)
-                result = subprocess.run('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True)
+                subprocess.run('sleep 5', shell=True, check=True)
+                result = subprocess.run('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True, text=True, check=True).stdout
                 result = result[:1]
                 if result == '1':
                     zeppelin_restarted = True
-            subprocess.run('sleep 5', shell=True)
-            subprocess.run('echo \"Configuring dataproc spark interpreter for Zeppelin\"', shell=True)
+            subprocess.run('sleep 5', shell=True, check=True)
+            subprocess.run('echo \"Configuring dataproc spark interpreter for Zeppelin\"', shell=True, check=True)
             if multiple_clusters == 'true':
                 while not port_number_found:
                     port_free = subprocess.run('sudo bash -c "nmap -p ' + str(default_port) +
-                                      ' localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True)
+                                      ' localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True, text=True, check=True).stdout
                     port_free = port_free[:1]
                     if port_free == '0':
                         livy_port = default_port
                         port_number_found = True
                     else:
                         default_port += 1
-                subprocess.run('sudo echo "livy.server.port = {0}" >> {1}conf/livy.conf'.format(str(livy_port), livy_path), shell=True)
-                subprocess.run('sudo echo "livy.spark.master = yarn" >> {}conf/livy.conf'.format(livy_path), shell=True)
+                subprocess.run('sudo echo "livy.server.port = {0}" >> {1}conf/livy.conf'.format(str(livy_port), livy_path), shell=True, check=True)
+                subprocess.run('sudo echo "livy.spark.master = yarn" >> {}conf/livy.conf'.format(livy_path), shell=True, check=True)
                 if os.path.exists('{}conf/spark-blacklist.conf'.format(livy_path)):
-                    subprocess.run('sudo sed -i "s/^/#/g" {}conf/spark-blacklist.conf'.format(livy_path), shell=True)
-                subprocess.run('sudo echo "export SPARK_HOME={0}" >> {1}conf/livy-env.sh'.format(spark_dir, livy_path), shell=True)
-                subprocess.run('sudo echo "export HADOOP_CONF_DIR={0}" >> {1}conf/livy-env.sh'.format(yarn_dir, livy_path), shell=True)
-                subprocess.run('sudo echo "export PYSPARK3_PYTHON=python{0}" >> {1}conf/livy-env.sh'.format(python_version[0:3], livy_path), shell=True)
+                    subprocess.run('sudo sed -i "s/^/#/g" {}conf/spark-blacklist.conf'.format(livy_path), shell=True, check=True)
+                subprocess.run('sudo echo "export SPARK_HOME={0}" >> {1}conf/livy-env.sh'.format(spark_dir, livy_path), shell=True, check=True)
+                subprocess.run('sudo echo "export HADOOP_CONF_DIR={0}" >> {1}conf/livy-env.sh'.format(yarn_dir, livy_path), shell=True, check=True)
+                subprocess.run('sudo echo "export PYSPARK3_PYTHON=python{0}" >> {1}conf/livy-env.sh'.format(python_version[0:3], livy_path), shell=True, check=True)
                 template_file = "/tmp/dataengine-service_interpreter.json"
                 fr = open(template_file, 'r+')
                 text = fr.read()
@@ -1257,17 +1257,17 @@ class GCPActions:
                 for _ in range(5):
                     try:
                         subprocess.run("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d " +
-                              "@/tmp/dataengine-service_interpreter.json http://localhost:8080/api/interpreter/setting", shell=True)
+                              "@/tmp/dataengine-service_interpreter.json http://localhost:8080/api/interpreter/setting", shell=True, check=True)
                         break
                     except:
-                        subprocess.run('sleep 5', shell=True)
-                subprocess.run('sudo cp /opt/livy-server-cluster.service /etc/systemd/system/livy-server-{}.service'.format(str(livy_port)), shell=True)
-                subprocess.run("sudo sed -i 's|OS_USER|{0}|' /etc/systemd/system/livy-server-{1}.service".format(os_user, str(livy_port)), shell=True)
-                subprocess.run("sudo sed -i 's|LIVY_PATH|{0}|' /etc/systemd/system/livy-server-{1}.service".format(livy_path, str(livy_port)), shell=True)
-                subprocess.run('sudo chmod 644 /etc/systemd/system/livy-server-{}.service'.format(str(livy_port)), shell=True)
-                subprocess.run('sudo systemctl daemon-reload', shell=True)
-                subprocess.run('sudo systemctl enable livy-server-{}'.format(str(livy_port)), shell=True)
-                subprocess.run('sudo systemctl start livy-server-{}'.format(str(livy_port)), shell=True)
+                        subprocess.run('sleep 5', shell=True, check=True)
+                subprocess.run('sudo cp /opt/livy-server-cluster.service /etc/systemd/system/livy-server-{}.service'.format(str(livy_port)), shell=True, check=True)
+                subprocess.run("sudo sed -i 's|OS_USER|{0}|' /etc/systemd/system/livy-server-{1}.service".format(os_user, str(livy_port)), shell=True, check=True)
+                subprocess.run("sudo sed -i 's|LIVY_PATH|{0}|' /etc/systemd/system/livy-server-{1}.service".format(livy_path, str(livy_port)), shell=True, check=True)
+                subprocess.run('sudo chmod 644 /etc/systemd/system/livy-server-{}.service'.format(str(livy_port)), shell=True, check=True)
+                subprocess.run('sudo systemctl daemon-reload', shell=True, check=True)
+                subprocess.run('sudo systemctl enable livy-server-{}'.format(str(livy_port)), shell=True, check=True)
+                subprocess.run('sudo systemctl start livy-server-{}'.format(str(livy_port)), shell=True, check=True)
             else:
                 template_file = "/tmp/dataengine-service_interpreter.json"
                 p_versions = ["2", "{}-dp".format(python_version[:3])]
@@ -1286,11 +1286,11 @@ class GCPActions:
                     for _ in range(5):
                         try:
                             subprocess.run("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d " +
-                                  "@/tmp/dataproc_spark_py{}_interpreter.json http://localhost:8080/api/interpreter/setting".format(p_version), shell=True)
+                                  "@/tmp/dataproc_spark_py{}_interpreter.json http://localhost:8080/api/interpreter/setting".format(p_version), shell=True, check=True)
                             break
                         except:
-                            subprocess.run('sleep 5', shell=True)
-            subprocess.run('touch /home/{0}/.ensure_dir/dataengine-service_{1}_interpreter_ensured'.format(os_user, cluster_name), shell=True)
+                            subprocess.run('sleep 5', shell=True, check=True)
+            subprocess.run('touch /home/{0}/.ensure_dir/dataengine-service_{1}_interpreter_ensured'.format(os_user, cluster_name), shell=True, check=True)
         except:
             sys.exit(1)
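
The retry loops around curl only become effective with check=True: without
it subprocess.run() never raises, so the except branch and its sleep were
dead code. The pattern now works like this sketch (time.sleep() shown as a
lighter alternative to shelling out to sleep):

    import subprocess
    import time

    for _ in range(5):
        try:
            subprocess.run("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d "
                           "@/tmp/dataengine-service_interpreter.json "
                           "http://localhost:8080/api/interpreter/setting", shell=True, check=True)
            break
        except subprocess.CalledProcessError:
            time.sleep(5)

Note that curl itself exits 0 on HTTP-level errors unless -f/--fail is
passed, so the retry as written only covers connection failures.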
 
@@ -1301,26 +1301,26 @@ class GCPActions:
                 python_version = f.read()
             python_version = python_version[0:5]
             if not os.path.exists('/opt/python/python{}'.format(python_version)):
-                subprocess.run('wget https://www.python.org/ftp/python/{0}/Python-{0}.tgz -O /tmp/Python-{0}.tgz'.format(python_version), shell=True)
-                subprocess.run('tar zxvf /tmp/Python-{}.tgz -C /tmp/'.format(python_version), shell=True)
-                subprocess.run('cd /tmp/Python-{0}; ./configure --prefix=/opt/python/python{0} --with-zlib-dir=/usr/local/lib/ --with-ensurepip=install'.format(python_version), shell=True)
-                subprocess.run('cd /tmp/Python-{}; sudo make altinstall'.format(python_version), shell=True)
-                subprocess.run('cd /tmp/; sudo rm -rf Python-{}/'.format(python_version), shell=True)
-                subprocess.run('sudo -i virtualenv /opt/python/python{}'.format(python_version), shell=True)
+                subprocess.run('wget https://www.python.org/ftp/python/{0}/Python-{0}.tgz -O /tmp/Python-{0}.tgz'.format(python_version), shell=True, check=True)
+                subprocess.run('tar zxvf /tmp/Python-{}.tgz -C /tmp/'.format(python_version), shell=True, check=True)
+                subprocess.run('cd /tmp/Python-{0}; ./configure --prefix=/opt/python/python{0} --with-zlib-dir=/usr/local/lib/ --with-ensurepip=install'.format(python_version), shell=True, check=True)
+                subprocess.run('cd /tmp/Python-{}; sudo make altinstall'.format(python_version), shell=True, check=True)
+                subprocess.run('cd /tmp/; sudo rm -rf Python-{}/'.format(python_version), shell=True, check=True)
+                subprocess.run('sudo -i virtualenv /opt/python/python{}'.format(python_version), shell=True, check=True)
                 venv_command = 'source /opt/python/python{}/bin/activate'.format(python_version)
                 pip_command = '/opt/python/python{0}/bin/pip{1}'.format(python_version, python_version[:3])
-                subprocess.run('{0} && sudo -i {1} install -U pip==9.0.3'.format(venv_command, pip_command), shell=True)
-                subprocess.run('{0} && sudo -i {1} install pyzmq==17.0.0'.format(venv_command, pip_command), shell=True)
-                subprocess.run('{0} && sudo -i {1} install ipython ipykernel --no-cache-dir'.format(venv_command, pip_command), shell=True)
+                subprocess.run('{0} && sudo -i {1} install -U pip==9.0.3'.format(venv_command, pip_command), shell=True, check=True)
+                subprocess.run('{0} && sudo -i {1} install pyzmq==17.0.0'.format(venv_command, pip_command), shell=True, check=True)
+                subprocess.run('{0} && sudo -i {1} install ipython ipykernel --no-cache-dir'.format(venv_command, pip_command), shell=True, check=True)
                 subprocess.run('{0} && sudo -i {1} install boto boto3 NumPy=={2} SciPy Matplotlib pandas Sympy Pillow sklearn --no-cache-dir'
-                      .format(venv_command, pip_command, numpy_version), shell=True)
+                      .format(venv_command, pip_command, numpy_version), shell=True, check=True)
                 if application == 'deeplearning':
-                    subprocess.run('{0} && sudo -i {1} install mxnet-cu80 opencv-python keras Theano --no-cache-dir'.format(venv_command, pip_command), shell=True)
+                    subprocess.run('{0} && sudo -i {1} install mxnet-cu80 opencv-python keras Theano --no-cache-dir'.format(venv_command, pip_command), shell=True, check=True)
                     python_without_dots = python_version.replace('.', '')
                     subprocess.run('{0} && sudo -i {1} install  https://cntk.ai/PythonWheel/GPU/cntk-2.0rc3-cp{2}-cp{2}m-linux_x86_64.whl --no-cache-dir'
-                          .format(venv_command, pip_command, python_without_dots[:2]), shell=True)
-                subprocess.run('sudo rm -rf /usr/bin/python{}-dp'.format(python_version[0:3]), shell=True)
-                subprocess.run('sudo ln -fs /opt/python/python{0}/bin/python{1} /usr/bin/python{1}-dp'.format(python_version, python_version[0:3]), shell=True)
+                          .format(venv_command, pip_command, python_without_dots[:2]), shell=True, check=True)
+                subprocess.run('sudo rm -rf /usr/bin/python{}-dp'.format(python_version[0:3]), shell=True, check=True)
+                subprocess.run('sudo ln -fs /opt/python/python{0}/bin/python{1} /usr/bin/python{1}-dp'.format(python_version, python_version[0:3]), shell=True, check=True)
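
A note on the '{venv_command} && sudo -i {pip_command} ...' chains above:
shell=True runs /bin/sh, and source is a bashism that dash (the default sh
on Debian/Ubuntu) does not provide, so the activation step can fail before
pip ever runs. An explicit-interpreter variant (a sketch, names as defined
above):

    import subprocess

    cmd = '{0} && sudo -i {1} install -U pip==9.0.3'.format(venv_command, pip_command)
    subprocess.run(cmd, shell=True, check=True, executable='/bin/bash')
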
         except Exception as err:
             logging.info(
                 "Unable to install python: " + str(err) + "\n Traceback: " + traceback.print_exc(
@@ -1508,32 +1508,32 @@ def remove_dataengine_kernels(notebook_name, os_user, key_path, cluster_name):
 
 
 def install_dataengine_spark(cluster_name, spark_link, spark_version, hadoop_version, cluster_dir, os_user, datalake_enabled):
-    subprocess.run('wget ' + spark_link + ' -O /tmp/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz', shell=True)
-    subprocess.run('tar -zxvf /tmp/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz -C /opt/', shell=True)
-    subprocess.run('mv /opt/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' + cluster_dir + 'spark/', shell=True)
-    subprocess.run('chown -R ' + os_user + ':' + os_user + ' ' + cluster_dir + 'spark/', shell=True)
+    subprocess.run('wget ' + spark_link + ' -O /tmp/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz', shell=True, check=True)
+    subprocess.run('tar -zxvf /tmp/' + cluster_name + '/spark-' + spark_version + '-bin-hadoop' + hadoop_version + '.tgz -C /opt/', shell=True, check=True)
+    subprocess.run('mv /opt/spark-' + spark_version + '-bin-hadoop' + hadoop_version + ' ' + cluster_dir + 'spark/', shell=True, check=True)
+    subprocess.run('chown -R ' + os_user + ':' + os_user + ' ' + cluster_dir + 'spark/', shell=True, check=True)
 
 
 def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_enabled, spark_configs=''):
     subprocess.run("jar_list=`find {0} -name '*.jar' | tr '\\n' ',' | sed 's/,$//'` ; echo \"spark.jars $jar_list\" >> \
-          /tmp/{1}/notebook_spark-defaults_local.conf".format(jars_dir, cluster_name), shell=True)
+          /tmp/{1}/notebook_spark-defaults_local.conf".format(jars_dir, cluster_name), shell=True, check=True)
     if os.path.exists('{0}spark/conf/spark-defaults.conf'.format(cluster_dir)):
         additional_spark_properties = subprocess.run('diff --changed-group-format="%>" --unchanged-group-format="" '
                                             '/tmp/{0}/notebook_spark-defaults_local.conf '
                                             '{1}spark/conf/spark-defaults.conf | grep -v "^#"'.format(
-                                             cluster_name, cluster_dir), capture_output=True, shell=True)
+                                             cluster_name, cluster_dir), capture_output=True, shell=True, text=True).stdout
         for property in additional_spark_properties.split('\n'):
-            subprocess.run('echo "{0}" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(property, cluster_name), shell=True)
+            subprocess.run('echo "{0}" >> /tmp/{1}/notebook_spark-defaults_local.conf'.format(property, cluster_name), shell=True, check=True)
     if os.path.exists('{0}'.format(cluster_dir)):
         subprocess.run('cp -f /tmp/{0}/notebook_spark-defaults_local.conf  {1}spark/conf/spark-defaults.conf'.format(cluster_name,
-                                                                                                        cluster_dir), shell=True)
-    subprocess.run('cp -f /opt/spark/conf/core-site.xml {}spark/conf/'.format(cluster_dir), shell=True)
+                                                                                                        cluster_dir), shell=True, check=True)
+    subprocess.run('cp -f /opt/spark/conf/core-site.xml {}spark/conf/'.format(cluster_dir), shell=True, check=True)
     if spark_configs and os.path.exists('{0}'.format(cluster_dir)):
         datalab_header = subprocess.run('cat /tmp/{0}/notebook_spark-defaults_local.conf | grep "^#"'.format(cluster_name),
-                               capture_output=True, shell=True)
+                               capture_output=True, shell=True, text=True).stdout
         spark_configurations = ast.literal_eval(spark_configs)
         new_spark_defaults = list()
-        spark_defaults = subprocess.run('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture_output=True, shell=True)
+        spark_defaults = subprocess.run('cat {0}spark/conf/spark-defaults.conf'.format(cluster_dir), capture_output=True, shell=True, text=True, check=True).stdout
         current_spark_properties = spark_defaults.split('\n')
         for param in current_spark_properties:
             if param.split(' ')[0] != '#':
@@ -1546,11 +1546,11 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
                                 new_spark_defaults.append(property + ' ' + config['Properties'][property])
                 new_spark_defaults.append(param)
         new_spark_defaults = set(new_spark_defaults)
-        subprocess.run("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_dir), shell=True)
+        subprocess.run("echo '{0}' > {1}/spark/conf/spark-defaults.conf".format(datalab_header, cluster_dir), shell=True, check=True)
         for prop in new_spark_defaults:
             prop = prop.rstrip()
-            subprocess.run('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir), shell=True)
-        subprocess.run('sed -i "/^\s*$/d" {0}/spark/conf/spark-defaults.conf'.format(cluster_dir), shell=True)
+            subprocess.run('echo "{0}" >> {1}/spark/conf/spark-defaults.conf'.format(prop, cluster_dir), shell=True, check=True)
+        subprocess.run('sed -i "/^\s*$/d" {0}/spark/conf/spark-defaults.conf'.format(cluster_dir), shell=True, check=True)
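
configure_dataengine_spark() rebuilds spark-defaults.conf with one echo
subprocess per property, plus a trailing sed pass to drop blank lines;
since the merge already happens in Python, the file can be written in a
single pass. A sketch using the names defined above (cluster_dir appears
to end in '/', which also makes the '{1}/spark' double slash above
harmless but redundant):

    defaults_path = '{0}spark/conf/spark-defaults.conf'.format(cluster_dir)
    with open(defaults_path, 'w') as f:
        f.write(datalab_header.rstrip() + '\n')
        for prop in sorted(new_spark_defaults):
            prop = prop.rstrip()
            if prop:                      # replaces the sed '/^\s*$/d' pass
                f.write(prop + '\n')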
 
 
 def find_des_jars(all_jars, des_path):
diff --git a/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py b/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
index 9139188..e0596cc 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
@@ -725,7 +725,7 @@ class GCPMeta:
                 '/response/.emr_creating_' + os.environ['exploratory_name']) or self.get_not_configured_dataproc(
                 os.environ['notebook_instance_name']):
             with hide('stderr', 'running', 'warnings'):
-                subprocess.run("echo 'Some Dataproc cluster is still being created/terminated, waiting..'", shell=True)
+                subprocess.run("echo 'Some Dataproc cluster is still being created/terminated, waiting..'", shell=True, check=True)
             time.sleep(60)
             self.dataproc_waiter(labels)
         else:
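
dataproc_waiter() shells out to echo purely to print a status line, which
the hide() wrapper then suppresses anyway; plain logging does the same
without a subprocess (sketch):

    import logging

    logging.info('Some Dataproc cluster is still being created/terminated, waiting..')
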
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
index 1223131..ec33468 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
@@ -85,12 +85,12 @@ def manage_pkg(command, environment, requisites):
                             traceback.print_exc()
                             append_result("Failed to manage_pkgs", str(err))
                 elif environment == 'local':
-                    if subprocess.run('sudo pgrep "^apt" -a && echo "busy" || echo "ready"', capture_output=True, shell=True) == 'busy':
+                    if subprocess.run('sudo pgrep "^apt" -a && echo "busy" || echo "ready"', capture_output=True, shell=True, text=True, check=True).stdout.strip().endswith('busy'):
                         counter += 1
                         time.sleep(10)
                     else:
                         allow = True
-                        subprocess.run('sudo apt-get {0} {1}'.format(command, requisites), capture_output=True, shell=True)
+                        subprocess.run('sudo apt-get {0} {1}'.format(command, requisites), capture_output=True, shell=True, check=True)
                 else:
                     print('Wrong environment')
     except:
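
In the busy/ready probe above, the trailing '|| echo "ready"' guarantees a
zero exit, so check=True is safe; the comparison, though, has to look at
the captured text (a CompletedProcess never equals a string), and since
the pgrep -a listing precedes the marker, the last line is what decides.
Schematically:

    import subprocess

    res = subprocess.run('sudo pgrep "^apt" -a && echo "busy" || echo "ready"',
                         capture_output=True, shell=True, text=True, check=True)
    busy = res.stdout.strip().endswith('busy')  # last line is "busy" or "ready"
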
@@ -155,7 +155,7 @@ def find_java_path_remote():
 
 
 def find_java_path_local():
-    java_path = subprocess.run("sh -c \"update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'\"", capture_output=True, shell=True)
+    java_path = subprocess.run("sh -c \"update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'\"", capture_output=True, shell=True, text=True, check=True).stdout.strip()
     return java_path
 
 
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index 65c19d250..fd34ce9 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -52,7 +52,7 @@ def ensure_pip(requisites):
 
 
 def dataengine_dir_prepare(cluster_dir):
-    subprocess.run('mkdir -p ' + cluster_dir, shell=True)
+    subprocess.run('mkdir -p ' + cluster_dir, shell=True, check=True)
 
 
 def install_pip_pkg(requisites, pip_version, lib_group):
@@ -143,20 +143,20 @@ def id_generator(size=10, chars=string.digits + string.ascii_letters):
 
 def ensure_dataengine_tensorflow_jars(jars_dir):
     subprocess.run('wget https://dl.bintray.com/spark-packages/maven/tapanalyticstoolkit/spark-tensorflow-connector/1.0.0-s_2.11/spark-tensorflow-connector-1.0.0-s_2.11.jar \
-         -O {}spark-tensorflow-connector-1.0.0-s_2.11.jar'.format(jars_dir), shell=True)
+         -O {}spark-tensorflow-connector-1.0.0-s_2.11.jar'.format(jars_dir), shell=True, check=True)
 
 
 def prepare(dataengine_service_dir, yarn_dir):
-    subprocess.run('mkdir -p ' + dataengine_service_dir, shell=True)
-    subprocess.run('mkdir -p ' + yarn_dir, shell=True)
-    subprocess.run('sudo mkdir -p /opt/python/', shell=True)
+    subprocess.run('mkdir -p ' + dataengine_service_dir, shell=True, check=True)
+    subprocess.run('mkdir -p ' + yarn_dir, shell=True, check=True)
+    subprocess.run('sudo mkdir -p /opt/python/', shell=True, check=True)
     result = os.path.exists(dataengine_service_dir + 'usr/')
     return result
 
 
 def configuring_notebook(dataengine_service_version):
     jars_path = '/opt/' + dataengine_service_version + '/jars/'
-    subprocess.run("""sudo bash -c "find """ + jars_path + """ -name '*netty*' | xargs rm -f" """, shell=True)
+    subprocess.run("""sudo bash -c "find """ + jars_path + """ -name '*netty*' | xargs rm -f" """, shell=True, check=True)
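
configuring_notebook() pipes find into xargs rm -f; with GNU findutils the
-delete action does the same in one process and copes with whitespace in
file names (a sketch, same jars_path as above):

    import subprocess

    subprocess.run("""sudo bash -c "find """ + jars_path + """ -name '*netty*' -delete" """,
                   shell=True, check=True)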
 
 
 def append_result(error, exception=''):
@@ -376,7 +376,7 @@ def ensure_py3spark_local_kernel(os_user, py3spark_local_path_dir, templates_dir
 def pyspark_kernel(kernels_dir, dataengine_service_version, cluster_name, spark_version, bucket, user_name, region, os_user='',
                    application='', pip_mirror='', numpy_version='1.14.3'):
     spark_path = '/opt/{0}/{1}/spark/'.format(dataengine_service_version, cluster_name)
-    subprocess.run('mkdir -p {0}pyspark_{1}/'.format(kernels_dir, cluster_name), shell=True)
+    subprocess.run('mkdir -p {0}pyspark_{1}/'.format(kernels_dir, cluster_name), shell=True, check=True)
     kernel_path = '{0}pyspark_{1}/kernel.json'.format(kernels_dir, cluster_name)
     template_file = "/tmp/pyspark_dataengine-service_template.json"
     with open(template_file, 'r') as f:
@@ -390,16 +390,16 @@ def pyspark_kernel(kernels_dir, dataengine_service_version, cluster_name, spark_
     text = text.replace('DATAENGINE-SERVICE_VERSION', dataengine_service_version)
     with open(kernel_path, 'w') as f:
         f.write(text)
-    subprocess.run('touch /tmp/kernel_var.json', shell=True)
+    subprocess.run('touch /tmp/kernel_var.json', shell=True, check=True)
     subprocess.run("PYJ=`find /opt/{0}/{1}/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat {2} | sed 's|PY4J|'$PYJ'|g' | sed \'/PYTHONPATH\"\:/s|\(.*\)\"|\\1/home/{3}/caffe/python:/home/{3}/pytorch/build:\"|\' > /tmp/kernel_var.json".
-          format(dataengine_service_version, cluster_name, kernel_path, os_user), shell=True)
-    subprocess.run('sudo mv /tmp/kernel_var.json ' + kernel_path, shell=True)
+          format(dataengine_service_version, cluster_name, kernel_path, os_user), shell=True, check=True)
+    subprocess.run('sudo mv /tmp/kernel_var.json ' + kernel_path, shell=True, check=True)
     get_cluster_python_version(region, bucket, user_name, cluster_name)
     with open('/tmp/python_version') as f:
         python_version = f.read()
     if python_version != '\n':
         installing_python(region, bucket, user_name, cluster_name, application, pip_mirror, numpy_version)
-        subprocess.run('mkdir -p {0}py3spark_{1}/'.format(kernels_dir, cluster_name), shell=True)
+        subprocess.run('mkdir -p {0}py3spark_{1}/'.format(kernels_dir, cluster_name), shell=True, check=True)
         kernel_path = '{0}py3spark_{1}/kernel.json'.format(kernels_dir, cluster_name)
         template_file = "/tmp/pyspark_dataengine-service_template.json"
         with open(template_file, 'r') as f:
@@ -414,10 +414,10 @@ def pyspark_kernel(kernels_dir, dataengine_service_version, cluster_name, spark_
         text = text.replace('DATAENGINE-SERVICE_VERSION', dataengine_service_version)
         with open(kernel_path, 'w') as f:
             f.write(text)
-        subprocess.run('touch /tmp/kernel_var.json', shell=True)
+        subprocess.run('touch /tmp/kernel_var.json', shell=True, check=True)
         subprocess.run("PYJ=`find /opt/{0}/{1}/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat {2} | sed 's|PY4J|'$PYJ'|g' | sed \'/PYTHONPATH\"\:/s|\(.*\)\"|\\1/home/{3}/caffe/python:/home/{3}/pytorch/build:\"|\' > /tmp/kernel_var.json"
-              .format(dataengine_service_version, cluster_name, kernel_path, os_user), shell=True)
-        subprocess.run('sudo mv /tmp/kernel_var.json {}'.format(kernel_path), shell=True)
+              .format(dataengine_service_version, cluster_name, kernel_path, os_user), shell=True, check=True)
+        subprocess.run('sudo mv /tmp/kernel_var.json {}'.format(kernel_path), shell=True, check=True)
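
The PYJ shell pipeline in pyspark_kernel() exists only to locate py4j zips
under the Spark directory and splice the result into kernel.json; the
stdlib glob module can do that lookup without a shell (a sketch; recursive
globbing needs Python 3.5+, names as defined above):

    import glob

    py4j = ':'.join(glob.glob('/opt/{0}/{1}/spark/**/*py4j*.zip'.format(
        dataengine_service_version, cluster_name), recursive=True))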
 
 
 def ensure_ciphers():
@@ -906,7 +906,7 @@ def update_zeppelin_interpreters(multiple_clusters, r_enabled, interpreter_mode=
         else:
             with open(interpreters_config, 'w') as f:
                 f.write(json.dumps(data, indent=2))
-            subprocess.run('sudo systemctl restart zeppelin-notebook', shell=True)
+            subprocess.run('sudo systemctl restart zeppelin-notebook', shell=True, check=True)
     except Exception as err:
         print('Failed to update Zeppelin interpreters', str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
index be06b19..bd3a115 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
@@ -45,12 +45,12 @@ def manage_pkg(command, environment, requisites):
                         allow = True
                         conn.sudo('yum {0} {1}'.format(command, requisites))
                 elif environment == 'local':
-                    if subprocess.run('sudo pgrep yum -a && echo "busy" || echo "ready"', capture_output=True, shell=True) == 'busy':
+                    if subprocess.run('sudo pgrep yum -a && echo "busy" || echo "ready"', capture_output=True, shell=True, text=True, check=True).stdout.strip().endswith('busy'):
                         counter += 1
                         time.sleep(10)
                     else:
                         allow = True
-                        subprocess.run('sudo yum {0} {1}'.format(command, requisites), capture_output=True, shell=True)
+                        subprocess.run('sudo yum {0} {1}'.format(command, requisites), capture_output=True, shell=True, check=True)
                 else:
                     print('Wrong environment')
     except:
@@ -101,7 +101,7 @@ def find_java_path_remote():
 
 
 def find_java_path_local():
-    java_path = subprocess.run("alternatives --display java | grep 'slave jre: ' | awk '{print $3}'", capture_output=True, shell=True)
+    java_path = subprocess.run("alternatives --display java | grep 'slave jre: ' | awk '{print $3}'", capture_output=True, shell=True, text=True, check=True).stdout.strip()
     return java_path
 
 
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
index 5a5f9c1..b1e4a6a 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
@@ -291,16 +291,16 @@ def install_livy_dependencies(os_user):
 
 def install_maven_emr(os_user):
     if not os.path.exists('/home/' + os_user + '/.ensure_dir/maven_ensured'):
-        subprocess.run('wget http://apache.volia.net/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz -O /tmp/maven.tar.gz', shell=True)
-        subprocess.run('sudo tar -zxvf /tmp/maven.tar.gz -C /opt/', shell=True)
-        subprocess.run('sudo ln -fs /opt/apache-maven-3.3.9/bin/mvn /usr/bin/mvn', shell=True)
-        subprocess.run('touch /home/' + os_user + '/.ensure_dir/maven_ensured', shell=True)
+        subprocess.run('wget http://apache.volia.net/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz -O /tmp/maven.tar.gz', shell=True, check=True)
+        subprocess.run('sudo tar -zxvf /tmp/maven.tar.gz -C /opt/', shell=True, check=True)
+        subprocess.run('sudo ln -fs /opt/apache-maven-3.3.9/bin/mvn /usr/bin/mvn', shell=True, check=True)
+        subprocess.run('touch /home/' + os_user + '/.ensure_dir/maven_ensured', shell=True, check=True)
 
 
 def install_livy_dependencies_emr(os_user):
     if not os.path.exists('/home/' + os_user + '/.ensure_dir/livy_dependencies_ensured'):
-        subprocess.run('sudo -i pip3.5 install cloudpickle requests requests-kerberos flake8 flaky pytest --no-cache-dir', shell=True)
-        subprocess.run('touch /home/' + os_user + '/.ensure_dir/livy_dependencies_ensured', shell=True)
+        subprocess.run('sudo -i pip3.5 install cloudpickle requests requests-kerberos flake8 flaky pytest --no-cache-dir', shell=True, check=True)
+        subprocess.run('touch /home/' + os_user + '/.ensure_dir/livy_dependencies_ensured', shell=True, check=True)
 
 
 def install_nodejs(os_user):
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py
index b466cc1..4d542b4 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py
@@ -94,7 +94,7 @@ if __name__ == "__main__":
                     os.environ['conf_os_user'], edge_instance_hostname, '3128', os.environ['notebook_scala_version'],
                     os.environ['application'], os.environ['conf_pypi_mirror'])
         try:
-            subprocess.run("~/scripts/{}_{}.py {}".format(application, 'install_dataengine-service_kernels', params), shell=True)
+            subprocess.run("~/scripts/{}_{}.py {}".format(application, 'install_dataengine-service_kernels', params), shell=True, check=True)
             datalab.actions_lib.remove_emr_tag(notebook_config['cluster_id'], ['State'])
             datalab.actions_lib.tag_emr_volume(notebook_config['cluster_id'], notebook_config['cluster_name'],
                                                os.environ['conf_tag_resource_id'])
@@ -116,7 +116,7 @@ if __name__ == "__main__":
                     notebook_config['key_path'],
                     os.environ['conf_os_user'])
         try:
-            subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params), shell=True)
+            subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params), shell=True, check=True)
             datalab.actions_lib.remove_emr_tag(notebook_config['cluster_id'], ['State'])
             datalab.actions_lib.tag_emr_volume(notebook_config['cluster_id'], notebook_config['cluster_name'],
                                                os.environ['conf_tag_resource_id'])
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
index 7de41c8..adf6e07 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
@@ -102,7 +102,7 @@ if __name__ == "__main__":
                    notebook_config['spark_master_url'], notebook_config['key_path'],
                    notebook_config['notebook_ip'], notebook_config['spark_master_ip'])
         try:
-            subprocess.run("~/scripts/{}_{}.py {}".format(os.environ['application'], 'install_dataengine_kernels', params), shell=True)
+            subprocess.run("~/scripts/{}_{}.py {}".format(os.environ['application'], 'install_dataengine_kernels', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -123,7 +123,7 @@ if __name__ == "__main__":
                     notebook_config['datalab_ssh_user'],
                     notebook_config['cluster_name'])
         try:
-            subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params), shell=True)
+            subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
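
The same shape repeats across the provisioning scripts below: a helper
script is launched through the shell, and only with check=True does its
nonzero exit actually reach the except branch. Schematically (params as in
the surrounding scripts, with the bare except narrowed to the error that
run() raises):

    import subprocess
    import traceback

    try:
        subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params),
                       shell=True, check=True)
    except subprocess.CalledProcessError:
        traceback.print_exc()
        raise Exception
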
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
index edc4a89..a823535 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
@@ -148,7 +148,7 @@ if __name__ == "__main__":
             notebook_config['tag_name'], notebook_config['instance_name'], instance_class,
             os.environ['notebook_disk_size'], notebook_config['primary_disk_size'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
 
         except:
             traceback.print_exc()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py b/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py
index 324cd20..d80f53a 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py
@@ -46,7 +46,7 @@ if __name__ == "__main__":
         params = "--user {} --hostname {} --keyfile '{}' --additional_config '{}'".format(
             args.os_user, ip, args.keyfile, args.additional_config)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except Exception as err:
             print('Error: {0}'.format(err))
             sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
index ab0bc8a..22c0d5e 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
@@ -70,7 +70,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except Exception as err:
             traceback.print_exc()
             datalab.fab.append_result("Failed to setup git credentials.", str(err))
@@ -84,7 +84,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True, check=True)
         except Exception as err:
             traceback.print_exc()
             datalab.fab.append_result("Failed to update last activity time.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
index 92240ba..d1a080f 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
@@ -49,7 +49,7 @@ def configure_dataengine_service(instance, emr_conf):
             (emr_conf['instance_ip'], emr_conf['key_path'], emr_conf['initial_user'],
              emr_conf['os_user'], emr_conf['sudo_group'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -67,7 +67,7 @@ def configure_dataengine_service(instance, emr_conf):
             .format(emr_conf['instance_ip'], emr_conf['cluster_name'], emr_conf['key_path'],
                     json.dumps(additional_config), emr_conf['os_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -128,7 +128,7 @@ def configure_dataengine_service(instance, emr_conf):
                     emr_conf['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
@@ -144,7 +144,7 @@ def configure_dataengine_service(instance, emr_conf):
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             emr_conf['instance_ip'], emr_conf['key_path'], json.dumps(additional_config), emr_conf['os_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py
index 25c195d..f47e231 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py
@@ -40,7 +40,7 @@ def install_libs(instance, data_engine):
                 data_engine['keyfile'], data_engine['libs'])
     try:
         # Run script to install additional libs
-        subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True, check=True)
     except:
         traceback.print_exc()
         raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py
index 3302458..967fc6f 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py
@@ -60,7 +60,7 @@ if __name__ == "__main__":
             .format(data_engine['os_user'], data_engine['master_ip'], data_engine['keyfile'], data_engine['group_name'])
         try:
             # Run script to get available libs
-            subprocess.run("~/scripts/{}.py {}".format('get_list_available_pkgs', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('get_list_available_pkgs', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py
index aa613fe..b6def63 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py
@@ -161,7 +161,7 @@ if __name__ == "__main__":
 
     try:
         datalab.meta_lib.emr_waiter(emr_conf['tag_name'], os.environ['notebook_instance_name'])
-        subprocess.run('touch /response/.emr_creating_{}'.format(emr_conf['exploratory_name']), shell=True)
+        subprocess.run('touch /response/.emr_creating_{}'.format(emr_conf['exploratory_name']), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         datalab.fab.append_result("EMR waiter fail.", str(err))
@@ -248,7 +248,7 @@ if __name__ == "__main__":
                 os.environ['conf_additional_tags'] = 'project_tag:{0};endpoint_tag:{1}'.format(emr_conf['project_tag'],
                                                                                                emr_conf['endpoint_tag'])
             print('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -256,7 +256,7 @@ if __name__ == "__main__":
         datalab.fab.append_result("Failed to create sg.", str(err))
         sys.exit(1)
 
-    subprocess.run("echo Waiting for changes to propagate; sleep 10", shell=True)
+    subprocess.run("echo Waiting for changes to propagate; sleep 10", shell=True, check=True)
 
     try:
         logging.info('[Creating EMR Cluster]')
@@ -314,10 +314,10 @@ if __name__ == "__main__":
             raise Exception
         cluster_name = emr_conf['cluster_name']
         keyfile_name = "{}{}.pem".format(os.environ['conf_key_dir'], emr_conf['key_name'])
-        subprocess.run('rm /response/.emr_creating_{}'.format(emr_conf['exploratory_name']), shell=True)
+        subprocess.run('rm /response/.emr_creating_{}'.format(emr_conf['exploratory_name']), shell=True, check=True)
     except Exception as err:
         datalab.fab.append_result("Failed to create EMR Cluster.", str(err))
-        subprocess.run('rm /response/.emr_creating_{}'.format(emr_conf['exploratory_name']), shell=True)
+        subprocess.run('rm /response/.emr_creating_{}'.format(emr_conf['exploratory_name']), shell=True, check=True)
         emr_id = datalab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name'])
         datalab.actions_lib.terminate_emr(emr_id)
         sys.exit(1)
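Note: in the second hunk above, the cleanup `rm /response/.emr_creating_*` now runs with
check=True inside the failure path. If the marker file is missing, rm exits non-zero and the
resulting CalledProcessError is raised before terminate_emr() and sys.exit(1) are reached,
masking the original EMR error. A best-effort alternative (a sketch only, assuming the
emr_conf dict from this script):

    import contextlib
    import os

    # Best-effort cleanup: a missing marker file must not raise and
    # mask the real failure that brought us into this except block.
    with contextlib.suppress(FileNotFoundError):
        os.remove('/response/.emr_creating_{}'.format(emr_conf['exploratory_name']))
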
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
index fb6627e..7995824 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
@@ -44,7 +44,7 @@ def configure_slave(slave_number, data_engine):
             data_engine['initial_user'], data_engine['datalab_ssh_user'], data_engine['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -59,7 +59,7 @@ def configure_slave(slave_number, data_engine):
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
             .format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_clean_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_clean_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -76,7 +76,7 @@ def configure_slave(slave_number, data_engine):
             .format(slave_hostname, slave_name, keyfile_name, json.dumps(additional_config),
                     data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -92,7 +92,7 @@ def configure_slave(slave_number, data_engine):
             format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -111,7 +111,7 @@ def configure_slave(slave_number, data_engine):
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
                    'slave')
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -127,7 +127,7 @@ def configure_slave(slave_number, data_engine):
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             slave_hostname, keyfile_name, json.dumps(additional_config), data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -228,7 +228,7 @@ if __name__ == "__main__":
             data_engine['initial_user'], data_engine['datalab_ssh_user'], data_engine['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -243,7 +243,7 @@ if __name__ == "__main__":
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
             .format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_clean_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_clean_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -260,7 +260,7 @@ if __name__ == "__main__":
             .format(master_node_hostname, data_engine['master_node_name'], keyfile_name, json.dumps(additional_config),
                     data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -276,7 +276,7 @@ if __name__ == "__main__":
             format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -292,7 +292,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             master_node_hostname, keyfile_name, json.dumps(additional_config), data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -311,7 +311,7 @@ if __name__ == "__main__":
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
                    'master')
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -363,7 +363,7 @@ if __name__ == "__main__":
                     data_engine['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py
index 313cf6f..3919513 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py
@@ -157,7 +157,7 @@ if __name__ == "__main__":
                     data_engine['notebook_dataengine_role_profile_name'], data_engine['tag_name'],
                     data_engine['master_node_name'], data_engine['primary_disk_size'], data_engine['instance_class'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
             data_engine['master_id'] = datalab.meta_lib.get_instance_by_name(data_engine['tag_name'],
                                                                              data_engine['master_node_name'])
             datalab.actions_lib.create_tag(data_engine['master_id'], data_engine['cluster_nodes_tag'], False)
@@ -189,7 +189,7 @@ if __name__ == "__main__":
                         data_engine['notebook_dataengine_role_profile_name'], data_engine['tag_name'],
                         slave_name, data_engine['primary_disk_size'], data_engine['instance_class'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
                 data_engine['slave_id'] = datalab.meta_lib.get_instance_by_name(data_engine['tag_name'], slave_name)
                 datalab.actions_lib.create_tag(data_engine['slave_id'], data_engine['cluster_nodes_tag'], False)
                 datalab.actions_lib.create_tag(data_engine['slave_id'], data_engine['cluster_nodes_resource_tag'],
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
index 223617c..e8b3ecd 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
@@ -97,7 +97,7 @@ if __name__ == "__main__":
             .format(os.environ['conf_os_user'], data_engine['notebook_ip'], data_engine['keyfile'],
                     data_engine['computational_ip'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True, check=True)
         except Exception as err:
             traceback.print_exc()
             datalab.fab.append_result("Failed to update last activity time.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
index 9c5ac35..03424dc 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
@@ -119,7 +119,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -137,7 +137,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -154,7 +154,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -171,7 +171,7 @@ if __name__ == "__main__":
             instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
             edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -194,7 +194,7 @@ if __name__ == "__main__":
                     os.environ['aws_region'], os.environ['notebook_r_mirror'],
                     notebook_config['ip_address'], notebook_config['exploratory_name'], edge_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -209,7 +209,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -226,7 +226,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -255,7 +255,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py b/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
index d600ba9..1257464 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
@@ -142,7 +142,7 @@ if __name__ == "__main__":
             edge_conf['initial_user'], edge_conf['datalab_ssh_user'], edge_conf['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -158,7 +158,7 @@ if __name__ == "__main__":
             format(edge_conf['instance_hostname'], edge_conf['keyfile_name'], edge_conf['datalab_ssh_user'],
                    os.environ['aws_region'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -198,7 +198,7 @@ if __name__ == "__main__":
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
             edge_conf['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_http_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_http_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -218,7 +218,7 @@ if __name__ == "__main__":
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
             edge_conf['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -238,7 +238,7 @@ if __name__ == "__main__":
                               edge_conf['endpoint_name']),
             edge_conf['keycloak_client_secret'], edge_conf['step_cert_sans'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_nginx_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_nginx_reverse_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -255,7 +255,7 @@ if __name__ == "__main__":
                            edge_conf['instance_hostname'], edge_conf['instance_hostname'], edge_conf['project_name'],
                            edge_conf['endpoint_name'], edge_conf['edge_hostname'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_keycloak', keycloak_params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_keycloak', keycloak_params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -274,7 +274,7 @@ if __name__ == "__main__":
                 edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
                 edge_conf['datalab_ssh_user'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('configure_nftables', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('configure_nftables', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_status.py b/infrastructure-provisioning/src/general/scripts/aws/edge_status.py
index 6c51394..591fc6d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_status.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_status.py
@@ -47,7 +47,7 @@ if __name__ == "__main__":
         print('[COLLECTING DATA]')
         params = '--list_resources "{}"'.format(os.environ['edge_list_resources'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_collect_data', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_collect_data', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
index eae2533..7805519 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
@@ -119,7 +119,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -137,7 +137,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
             notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -154,7 +154,7 @@ if __name__ == "__main__":
             instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
             edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -190,7 +190,7 @@ if __name__ == "__main__":
                    notebook_config['exploratory_name'],
                    edge_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -207,7 +207,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -222,8 +222,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -240,7 +240,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -269,7 +269,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
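Note: the subprocess.run("~/scripts/{}.py {}".format(...), shell=True, check=True) call
wrapped in a bare except recurs at every call site in this patch. A possible follow-up
refactoring (a sketch, not something this commit does) would centralize the pattern and
preserve the exception type:

    import subprocess
    import traceback

    def run_routine(script, params):
        """Run a provisioning script; let failures propagate as
        CalledProcessError instead of a bare Exception."""
        try:
            subprocess.run("~/scripts/{}.py {}".format(script, params),
                           shell=True, check=True)
        except subprocess.CalledProcessError:
            traceback.print_exc()
            raise

    # Usage mirroring the call sites above:
    # run_routine('common_configure_proxy', params)
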
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
index 273de3a..95a14c7 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
@@ -58,7 +58,7 @@ yarn_dir = '/opt/' + args.emr_version + '/' + args.cluster_name + '/conf/'
 
 def r_kernel(args):
     spark_path = '/opt/{}/{}/spark/'.format(args.emr_version, args.cluster_name)
-    subprocess.run('mkdir -p {}/r_{}/'.format(kernels_dir, args.cluster_name), shell=True)
+    subprocess.run('mkdir -p {}/r_{}/'.format(kernels_dir, args.cluster_name), shell=True, check=True)
     kernel_path = "{}/r_{}/kernel.json".format(kernels_dir, args.cluster_name)
     template_file = "/tmp/r_dataengine-service_template.json"
 
@@ -81,9 +81,9 @@ def r_kernel(args):
 
 def toree_kernel(args):
     spark_path = '/opt/' + args.emr_version + '/' + args.cluster_name + '/spark/'
-    scala_version = subprocess.run("Spark-submit --version 2>&1 | awk '/Scala version / {gsub(/,/, \"\"); print $4}'", shell=True)
+    scala_version = subprocess.run("spark-submit --version 2>&1 | awk '/Scala version / {gsub(/,/, \"\"); print $4}'", capture_output=True, shell=True, check=True).stdout.decode('utf-8').strip()
     if args.emr_version == 'emr-4.3.0' or args.emr_version == 'emr-4.6.0' or args.emr_version == 'emr-4.8.0':
-        subprocess.run('mkdir -p ' + kernels_dir + 'toree_' + args.cluster_name + '/', shell=True)
+        subprocess.run('mkdir -p ' + kernels_dir + 'toree_' + args.cluster_name + '/', shell=True, check=True)
         kernel_path = kernels_dir + "toree_" + args.cluster_name + "/kernel.json"
         template_file = "/tmp/toree_dataengine-service_template.json"
         with open(template_file, 'r') as f:
@@ -95,15 +95,15 @@ def toree_kernel(args):
         text = text.replace('SCALA_VERSION', args.scala_version)
         with open(kernel_path, 'w') as f:
             f.write(text)
-        subprocess.run('touch /tmp/kernel_var.json', shell=True)
+        subprocess.run('touch /tmp/kernel_var.json', shell=True, check=True)
         subprocess.run(
-            "PYJ=`find /opt/" + args.emr_version + "/" + args.cluster_name + "/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat " + kernel_path + " | sed 's|PY4J|'$PYJ'|g' > /tmp/kernel_var.json", shell=True)
-        subprocess.run('sudo mv /tmp/kernel_var.json ' + kernel_path, shell=True)
+            "PYJ=`find /opt/" + args.emr_version + "/" + args.cluster_name + "/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat " + kernel_path + " | sed 's|PY4J|'$PYJ'|g' > /tmp/kernel_var.json", shell=True, check=True)
+        subprocess.run('sudo mv /tmp/kernel_var.json ' + kernel_path, shell=True, check=True)
     else:
-        subprocess.run('mkdir -p ' + kernels_dir + 'toree_' + args.cluster_name + '/', shell=True)
-        subprocess.run('tar zxvf /tmp/toree_kernel.tar.gz -C ' + kernels_dir + 'toree_' + args.cluster_name + '/', shell=True)
-        subprocess.run('sudo mv {0}toree_{1}/toree-0.3.0-incubating/* {0}toree_{1}/'.format(kernels_dir, args.cluster_name), shell=True)
-        subprocess.run('sudo rm -r {0}toree_{1}/toree-0.3.0-incubating'.format(kernels_dir, args.cluster_name), shell=True)
+        subprocess.run('mkdir -p ' + kernels_dir + 'toree_' + args.cluster_name + '/', shell=True, check=True)
+        subprocess.run('tar zxvf /tmp/toree_kernel.tar.gz -C ' + kernels_dir + 'toree_' + args.cluster_name + '/', shell=True, check=True)
+        subprocess.run('sudo mv {0}toree_{1}/toree-0.3.0-incubating/* {0}toree_{1}/'.format(kernels_dir, args.cluster_name), shell=True, check=True)
+        subprocess.run('sudo rm -r {0}toree_{1}/toree-0.3.0-incubating'.format(kernels_dir, args.cluster_name), shell=True, check=True)
         kernel_path = kernels_dir + "toree_" + args.cluster_name + "/kernel.json"
         template_file = "/tmp/toree_dataengine-service_templatev2.json"
         with open(template_file, 'r') as f:
@@ -116,12 +116,12 @@ def toree_kernel(args):
         text = text.replace('SCALA_VERSION', args.scala_version)
         with open(kernel_path, 'w') as f:
             f.write(text)
-        subprocess.run('touch /tmp/kernel_var.json', shell=True)
+        subprocess.run('touch /tmp/kernel_var.json', shell=True, check=True)
         subprocess.run(
             "PYJ=`find /opt/" + args.emr_version + "/" + args.cluster_name +
             "/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat " + kernel_path +
-            " | sed 's|PY4J|'$PYJ'|g' > /tmp/kernel_var.json", shell=True)
-        subprocess.run('sudo mv /tmp/kernel_var.json ' + kernel_path, shell=True)
+            " | sed 's|PY4J|'$PYJ'|g' > /tmp/kernel_var.json", shell=True, check=True)
+        subprocess.run('sudo mv /tmp/kernel_var.json ' + kernel_path, shell=True, check=True)
         run_sh_path = kernels_dir + "toree_" + args.cluster_name + "/bin/run.sh"
         template_sh_file = '/tmp/run_template.sh'
         with open(template_sh_file, 'r') as f:
@@ -136,58 +136,58 @@ def add_breeze_library_emr(args):
     spark_defaults_path = '/opt/' + args.emr_version + '/' + args.cluster_name + '/spark/conf/spark-defaults.conf'
     new_jars_directory_path = '/opt/' + args.emr_version + '/jars/usr/other/'
     breeze_tmp_dir = '/tmp/breeze_tmp_emr/'
-    subprocess.run('sudo mkdir -p ' + new_jars_directory_path, shell=True)
-    subprocess.run('mkdir -p ' + breeze_tmp_dir, shell=True)
+    subprocess.run('sudo mkdir -p ' + new_jars_directory_path, shell=True, check=True)
+    subprocess.run('mkdir -p ' + breeze_tmp_dir, shell=True, check=True)
     subprocess.run('wget https://repo1.maven.org/maven2/org/scalanlp/breeze_2.11/0.12/breeze_2.11-0.12.jar -O ' +
-          breeze_tmp_dir + 'breeze_2.11-0.12.jar', shell=True)
+          breeze_tmp_dir + 'breeze_2.11-0.12.jar', shell=True, check=True)
     subprocess.run('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-natives_2.11/0.12/breeze-natives_2.11-0.12.jar -O '
-          + breeze_tmp_dir + 'breeze-natives_2.11-0.12.jar', shell=True)
+          + breeze_tmp_dir + 'breeze-natives_2.11-0.12.jar', shell=True, check=True)
     subprocess.run('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-viz_2.11/0.12/breeze-viz_2.11-0.12.jar -O ' +
-          breeze_tmp_dir + 'breeze-viz_2.11-0.12.jar', shell=True)
+          breeze_tmp_dir + 'breeze-viz_2.11-0.12.jar', shell=True, check=True)
     subprocess.run('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-macros_2.11/0.12/breeze-macros_2.11-0.12.jar -O ' +
-          breeze_tmp_dir + 'breeze-macros_2.11-0.12.jar', shell=True)
+          breeze_tmp_dir + 'breeze-macros_2.11-0.12.jar', shell=True, check=True)
     subprocess.run('wget https://repo1.maven.org/maven2/org/scalanlp/breeze-parent_2.11/0.12/breeze-parent_2.11-0.12.jar -O ' +
-          breeze_tmp_dir + 'breeze-parent_2.11-0.12.jar', shell=True)
+          breeze_tmp_dir + 'breeze-parent_2.11-0.12.jar', shell=True, check=True)
     subprocess.run('wget https://repo1.maven.org/maven2/org/jfree/jfreechart/1.0.19/jfreechart-1.0.19.jar -O ' +
-          breeze_tmp_dir + 'jfreechart-1.0.19.jar', shell=True)
+          breeze_tmp_dir + 'jfreechart-1.0.19.jar', shell=True, check=True)
     subprocess.run('wget https://repo1.maven.org/maven2/org/jfree/jcommon/1.0.24/jcommon-1.0.24.jar -O ' +
-          breeze_tmp_dir + 'jcommon-1.0.24.jar', shell=True)
+          breeze_tmp_dir + 'jcommon-1.0.24.jar', shell=True, check=True)
     subprocess.run('wget --no-check-certificate https://brunelvis.org/jar/spark-kernel-brunel-all-2.3.jar -O ' +
-          breeze_tmp_dir + 'spark-kernel-brunel-all-2.3.jar', shell=True)
-    subprocess.run('sudo mv ' + breeze_tmp_dir + '* ' + new_jars_directory_path, shell=True)
+          breeze_tmp_dir + 'spark-kernel-brunel-all-2.3.jar', shell=True, check=True)
+    subprocess.run('sudo mv ' + breeze_tmp_dir + '* ' + new_jars_directory_path, shell=True, check=True)
     subprocess.run(""" sudo bash -c "sed -i '/spark.driver.extraClassPath/s/$/:\/opt\/""" + args.emr_version +
-          """\/jars\/usr\/other\/*/' """ + spark_defaults_path + """" """, shell=True)
+          """\/jars\/usr\/other\/*/' """ + spark_defaults_path + """" """, shell=True, check=True)
 
 def install_sparkamagic_kernels(args):
     try:
-        subprocess.run('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension', shell=True)
-        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir), shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir), shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkrkernel --user'.format(sparkmagic_dir), shell=True)
+        subprocess.run('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension', shell=True, check=True)
+        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True, check=True).stdout.decode('utf-8').rstrip('\n')
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir), shell=True, check=True)
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir), shell=True, check=True)
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkrkernel --user'.format(sparkmagic_dir), shell=True, check=True)
         pyspark_kernel_name = 'PySpark (Python-{0} / Spark-{1} ) [{2}]'.format(args.python_version, args.spark_version,
                                                                          args.cluster_name)
         subprocess.run('sed -i \'s|PySpark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/pysparkkernel/kernel.json'.format(
-            pyspark_kernel_name, args.os_user), shell=True)
+            pyspark_kernel_name, args.os_user), shell=True, check=True)
         spark_kernel_name = 'Spark (Scala-{0} / Spark-{1} ) [{2}]'.format(args.scala_version, args.spark_version,
                                                                          args.cluster_name)
         subprocess.run('sed -i \'s|Spark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkkernel/kernel.json'.format(
-            spark_kernel_name, args.os_user), shell=True)
+            spark_kernel_name, args.os_user), shell=True, check=True)
         sparkr_kernel_name = 'SparkR (R-{0} / Spark-{1} ) [{2}]'.format(args.r_version, args.spark_version,
                                                                             args.cluster_name)
         subprocess.run('sed -i \'s|SparkR|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkrkernel/kernel.json'.format(
-            sparkr_kernel_name, args.os_user), shell=True)
+            sparkr_kernel_name, args.os_user), shell=True, check=True)
         subprocess.run('sudo mv -f /home/{0}/.local/share/jupyter/kernels/pysparkkernel '
-              '/home/{0}/.local/share/jupyter/kernels/pysparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True)
+              '/home/{0}/.local/share/jupyter/kernels/pysparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True, check=True)
         subprocess.run('sudo mv -f /home/{0}/.local/share/jupyter/kernels/sparkkernel '
-              '/home/{0}/.local/share/jupyter/kernels/sparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True)
+              '/home/{0}/.local/share/jupyter/kernels/sparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True, check=True)
         subprocess.run('sudo mv -f /home/{0}/.local/share/jupyter/kernels/sparkrkernel '
-              '/home/{0}/.local/share/jupyter/kernels/sparkrkernel_{1}'.format(args.os_user, args.cluster_name), shell=True)
-        subprocess.run('mkdir -p /home/' + args.os_user + '/.sparkmagic', shell=True)
-        subprocess.run('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json', shell=True)
+              '/home/{0}/.local/share/jupyter/kernels/sparkrkernel_{1}'.format(args.os_user, args.cluster_name), shell=True, check=True)
+        subprocess.run('mkdir -p /home/' + args.os_user + '/.sparkmagic', shell=True, check=True)
+        subprocess.run('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json', shell=True, check=True)
         subprocess.run('sed -i \'s|LIVY_HOST|{0}|g\' /home/{1}/.sparkmagic/config.json'.format(
-                args.master_ip, args.os_user), shell=True)
-        subprocess.run('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user), shell=True)
+                args.master_ip, args.os_user), shell=True, check=True)
+        subprocess.run('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user), shell=True, check=True)
     except:
         sys.exit(1)
 
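Note: when the result of subprocess.run() is interpolated into another command, as with
sparkmagic_dir above, capture_output=True alone is not enough: run() returns a
CompletedProcess, so stdout has to be extracted and decoded explicitly. A minimal sketch
of the capture pattern (illustrative, with a stand-in command):

    import subprocess

    # capture_output=True collects stdout/stderr as bytes; decode and
    # strip the trailing newline before formatting into a path.
    completed = subprocess.run("pip3 show sparkmagic | awk '/Location/ {print $2}'",
                               capture_output=True, shell=True, check=True)
    sparkmagic_dir = completed.stdout.decode('utf-8').strip()
    print('{}/sparkmagic/kernels/sparkkernel'.format(sparkmagic_dir))
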
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
index 22daa2a..914b6d4 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
@@ -65,7 +65,7 @@ def configure_notebook(args):
     conn.sudo('chmod 755 /usr/local/bin/jupyter_dataengine-service_create_configs.py')
     conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
     conn.run('mkdir -p /tmp/datalab_libs/')
-    subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string), shell=True)
+    subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string), shell=True, check=True)
     conn.run('chmod a+x /tmp/datalab_libs/*')
     conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists('/usr/lib64'):
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
index 2214975..2e1a366 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
@@ -119,7 +119,7 @@ if __name__ == "__main__":
              notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -137,7 +137,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -153,7 +153,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --user {} --region {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -189,7 +189,7 @@ if __name__ == "__main__":
                    notebook_config['ip_address'],
                    notebook_config['exploratory_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_jupyterlab_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_jupyterlab_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -206,7 +206,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -221,8 +221,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -239,7 +239,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -268,7 +268,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
@@ -303,7 +303,7 @@ if __name__ == "__main__":
                    keyfile_name,
                    notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/configure_proxy_for_docker.py {}".format(params), shell=True)
+            subprocess.run("~/scripts/configure_proxy_for_docker.py {}".format(params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -322,7 +322,7 @@ if __name__ == "__main__":
                    keyfile_name,
                    notebook_config['datalab_ssh_user'])
         try:
-           subprocess.run("~/scripts/jupyterlab_container_start.py {}".format(params), shell=True)
+           subprocess.run("~/scripts/jupyterlab_container_start.py {}".format(params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py b/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py
index d065257..64a67a8 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py
@@ -107,7 +107,7 @@ if __name__ == "__main__":
 
     print('Preparing parameters file')
     try:
-        subprocess.run("cp /root/templates/profile.json /tmp/", shell=True)
+        subprocess.run("cp /root/templates/profile.json /tmp/", shell=True, check=True)
         with open("/tmp/profile.json", 'w') as profile:
             prof ={
                     "allowed_ips": odahu_conf['allowed_cidr'],
@@ -257,8 +257,8 @@ if __name__ == "__main__":
                     }
                     }
             profile.write(json.dumps(prof))
-        subprocess.run('cat /tmp/profile.json', shell=True)
-        subprocess.run('cp /tmp/profile.json /', shell=True)
+        subprocess.run('cat /tmp/profile.json', shell=True, check=True)
+        subprocess.run('cp /tmp/profile.json /', shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to configure parameter file.", str(err))
@@ -267,8 +267,8 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        subprocess.run('tf_runner create -o /tmp/result.json', shell=True)
-        subprocess.run("sed -i 's|name|description|g' /tmp/result.json", shell=True)
+        subprocess.run('tf_runner create -o /tmp/result.json', shell=True, check=True)
+        subprocess.run("sed -i 's|name|description|g' /tmp/result.json", shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to deploy Odahu cluster.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py
index 30953d1..4a8e9e7 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py
@@ -83,7 +83,7 @@ if __name__ == "__main__":
         params = "--bucket_name {0} --bucket_tags {1} --region {2} --bucket_name_tag {0}". \
             format(odahu_conf['bucket_name'], odahu_conf['bucket_tags'], odahu_conf['region'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_bucket', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_bucket', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
index 1724199..8eb4c5d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
@@ -139,7 +139,7 @@ if __name__ == "__main__":
             project_conf['user_key'] = os.environ['key']
             try:
                 subprocess.run('echo "{0}" >> {1}{2}.pub'.format(project_conf['user_key'], os.environ['conf_key_dir'],
-                                                        project_conf['project_name']), shell=True)
+                                                        project_conf['project_name']), shell=True, check=True)
             except:
                 print("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
         except KeyError:
@@ -197,7 +197,7 @@ if __name__ == "__main__":
                   project_conf['private_subnet_name'],
                   project_conf['zone'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -224,7 +224,7 @@ if __name__ == "__main__":
                          project_conf['edge_policy_name'], os.environ['aws_region'], project_conf['tag_name'],
                          project_conf['service_base_name'], user_tag)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_role_policy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_role_policy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -244,7 +244,7 @@ if __name__ == "__main__":
                          project_conf['notebook_dataengine_policy_name'], os.environ['aws_region'],
                          project_conf['tag_name'], project_conf['service_base_name'], user_tag)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_role_policy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_role_policy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -419,7 +419,7 @@ if __name__ == "__main__":
                        project_conf['service_base_name'], project_conf['edge_instance_name'], json.dumps(edge_sg_egress),
                        True, project_conf['notebook_instance_name'], 'edge')
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
             except Exception as err:
                 traceback.print_exc()
                 datalab.fab.append_result("Failed creating security group for edge node.", str(err))
@@ -501,7 +501,7 @@ if __name__ == "__main__":
                                                           project_conf['service_base_name'],
                                                           project_conf['notebook_instance_name'], True)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -527,7 +527,7 @@ if __name__ == "__main__":
                                                           project_conf['service_base_name'],
                                                           project_conf['dataengine_instances_name'], True)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -549,7 +549,7 @@ if __name__ == "__main__":
                                                           project_conf['service_base_name'],
                                                           project_conf['dataengine_instances_name'], True)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -573,7 +573,7 @@ if __name__ == "__main__":
             format(project_conf['shared_bucket_name'], project_conf['shared_bucket_tags'], project_conf['region'],
                    project_conf['shared_bucket_name_tag'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_bucket', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_bucket', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -585,7 +585,7 @@ if __name__ == "__main__":
                  .format(project_conf['bucket_name'], project_conf['bucket_tags'], project_conf['region'],
                          project_conf['bucket_name_tag'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_bucket', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_bucket', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -609,7 +609,7 @@ if __name__ == "__main__":
                   project_conf['service_base_name'], project_conf['region'],
                   os.environ['aws_user_predefined_s3_policies'], project_conf['endpoint_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_policy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_policy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -637,7 +637,7 @@ if __name__ == "__main__":
                     project_conf['edge_role_profile_name'], project_conf['tag_name'],
                     project_conf['edge_instance_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
             edge_instance = datalab.meta_lib.get_instance_by_name(project_conf['tag_name'],
                                                                   project_conf['edge_instance_name'])
             if os.environ['edge_is_nat']:
@@ -673,7 +673,7 @@ if __name__ == "__main__":
                 project_conf['elastic_ip'], project_conf['edge_id'], project_conf['tag_name'],
                 project_conf['elastic_ip_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('edge_associate_elastic_ip', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('edge_associate_elastic_ip', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -704,7 +704,7 @@ if __name__ == "__main__":
             params = "--vpc_id {} --infra_tag_value {} --edge_instance_id {} --private_subnet_id {}".format(
                 project_conf['vpc2_id'], project_conf['nat_rt_name'], edge_instance, subnet_id)
             try:
-                subprocess.run("~/scripts/{}.py {}".format('edge_configure_route_table', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('edge_configure_route_table', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
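Note: the substance of every hunk in this patch is the added check=True. Without it, subprocess.run() returns a CompletedProcess regardless of the child's exit status, so the surrounding try/except never fires on a failed script; with it, a non-zero exit raises subprocess.CalledProcessError. A minimal, self-contained sketch of that behavior (the command here is illustrative):

    import subprocess

    try:
        # check=True turns a non-zero exit status into an exception;
        # without it this call would return silently.
        subprocess.run("exit 3", shell=True, check=True)
    except subprocess.CalledProcessError as err:
        print("command failed with status", err.returncode)  # prints 3
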
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
index 54d5ad9..70e5e2c 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
@@ -121,7 +121,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -139,7 +139,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -156,7 +156,7 @@ if __name__ == "__main__":
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
                    edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -179,7 +179,7 @@ if __name__ == "__main__":
                     os.environ['notebook_r_mirror'], notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -196,7 +196,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -211,7 +211,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -228,7 +228,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -248,7 +248,7 @@ if __name__ == "__main__":
             .format(edge_instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], 'rstudio',
                     notebook_config['exploratory_name'], json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
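Note on the error handling around these calls: the bare except catches the new CalledProcessError, prints the traceback, and re-raises a generic Exception, so the exit status survives only in the printed log. A hedged sketch of how the status could also be kept on the raised exception via chaining (not what this patch does, just an illustration):

    import subprocess
    import traceback

    def run_script(cmd):
        try:
            subprocess.run(cmd, shell=True, check=True)
        except subprocess.CalledProcessError as err:
            traceback.print_exc()
            # Chaining keeps err (and err.returncode) reachable as
            # __cause__ on the re-raised exception; the patch itself
            # raises a bare Exception.
            raise Exception("script failed") from err
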
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py
index 49484fc..6ab945a 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py
@@ -52,34 +52,34 @@ yarn_dir = '/opt/{0}/{1}/conf/'.format(args.emr_version, args.cluster_name)
 def configure_rstudio():
     if not os.path.exists('/home/' + args.os_user + '/.ensure_dir/rstudio_dataengine-service_ensured'):
         try:
-            subprocess.run('echo "export R_LIBS_USER=' + spark_dir + '/R/lib:" >> /home/' + args.os_user + '/.bashrc', shell=True)
-            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run('echo \'SPARK_HOME="' + spark_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run('echo \'YARN_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run('echo \'HADOOP_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True)
-            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True)
+            subprocess.run('echo "export R_LIBS_USER=' + spark_dir + '/R/lib:" >> /home/' + args.os_user + '/.bashrc', shell=True, check=True)
+            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run('echo \'SPARK_HOME="' + spark_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run('echo \'YARN_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run('echo \'HADOOP_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True, check=True)
+            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
             #fix emr 5.19 problem with warnings in rstudio because of bug in AWS configuration
             if args.emr_version == "emr-5.19.0":
-                subprocess.run("sed -i '/DRFA/s/^/#/' " + spark_dir + "conf/log4j.properties", shell=True)
-            subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine-service_ensured', shell=True)
+                subprocess.run("sed -i '/DRFA/s/^/#/' " + spark_dir + "conf/log4j.properties", shell=True, check=True)
+            subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine-service_ensured', shell=True, check=True)
         except Exception as err:
             print('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
-            subprocess.run("sed -i '/R_LIBS_USER/ { s|=\(.*\)|=\\1" + spark_dir + "/R/lib:| }' /home/" + args.os_user + "/.bashrc", shell=True)
-            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True)
-            subprocess.run('echo \'SPARK_HOME="' + spark_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run('echo \'YARN_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run('echo \'HADOOP_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True)
+            subprocess.run("sed -i '/R_LIBS_USER/ { s|=\(.*\)|=\\1" + spark_dir + "/R/lib:| }' /home/" + args.os_user + "/.bashrc", shell=True, check=True)
+            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True, check=True)
+            subprocess.run('echo \'SPARK_HOME="' + spark_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run('echo \'YARN_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run('echo \'HADOOP_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
             #fix emr 5.19 problem with warnings in rstudio because of bug in AWS configuration
             if args.emr_version == "emr-5.19.0":
-                subprocess.run("sed -i '/DRFA/s/^/#/' " + spark_dir + "conf/log4j.properties", shell=True)
+                subprocess.run("sed -i '/DRFA/s/^/#/' " + spark_dir + "conf/log4j.properties", shell=True, check=True)
         except Exception as err:
             print('Error: {0}'.format(err))
             sys.exit(1)
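Note for the hunks above: with check=True, the sed/echo edits of ~/.Renviron and ~/.Rprofile now abort configure_rstudio() as soon as one of them fails (for example, sed exits non-zero when the target file is missing), instead of leaving a partially written R environment behind. An illustrative shell-free equivalent of the append steps, assuming the same paths (sketch only, not part of the patch):

    def append_renviron(os_user, spark_dir, yarn_dir):
        # Appends the same variables the echo >> .Renviron hunks add;
        # failures surface as ordinary OSError instead of an exit status.
        path = '/home/{}/.Renviron'.format(os_user)
        with open(path, 'a') as renviron:
            renviron.write('SPARK_HOME="{}"\n'.format(spark_dir))
            renviron.write('YARN_CONF_DIR="{}"\n'.format(yarn_dir))
            renviron.write('HADOOP_CONF_DIR="{}"\n'.format(yarn_dir))
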
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py
index 9e3a3ac..eceb451 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py
@@ -187,7 +187,7 @@ if __name__ == "__main__":
             ssn_conf['initial_user'], ssn_conf['datalab_ssh_user'], ssn_conf['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -205,7 +205,7 @@ if __name__ == "__main__":
                    ssn_conf['datalab_ssh_user'], os.environ['aws_region'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -230,7 +230,7 @@ if __name__ == "__main__":
             os.environ['conf_tag_resource_id'], ssn_conf['step_cert_sans'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_ssn_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_ssn_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -263,7 +263,7 @@ if __name__ == "__main__":
                                                           os.environ['conf_cloud_provider'], os.environ['aws_region'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_docker', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_docker', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -616,7 +616,7 @@ if __name__ == "__main__":
                    os.environ['keycloak_client_secret'],
                    os.environ['keycloak_auth_server_url'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_ui', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_ui', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -678,14 +678,14 @@ if __name__ == "__main__":
         params = "--instance_name {} --local_log_filepath {} --os_user {} --instance_hostname {}". \
             format(ssn_conf['instance_name'], local_log_filepath, ssn_conf['datalab_ssh_user'],
                    ssn_conf['instance_hostname'])
-        subprocess.run("~/scripts/{}.py {}".format('upload_response_file', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('upload_response_file', params), shell=True, check=True)
 
         logging.info('[FINALIZE]')
         print('[FINALIZE]')
         params = ""
         if os.environ['conf_lifecycle_stage'] == 'prod':
             params += "--key_id {}".format(os.environ['aws_access_key'])
-            subprocess.run("~/scripts/{}.py {}".format('ssn_finalize', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('ssn_finalize', params), shell=True, check=True)
     except Exception as err:
         datalab.fab.append_result("Error with writing results.", str(err))
         clear_resources()
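Note: unlike most calls in this patch, upload_response_file and ssn_finalize above are not wrapped in an inner try/except, so with check=True their failure propagates straight to the surrounding handler, which records the error and calls clear_resources(). A sketch of that control flow, with clear_resources assumed to tear down the partially created SSN (illustrative):

    import subprocess

    def finalize(params, clear_resources):
        try:
            # Previously an unchecked run() could fail here unnoticed;
            # check=True routes the failure to the cleanup path below.
            subprocess.run("~/scripts/ssn_finalize.py " + params,
                           shell=True, check=True)
        except Exception as err:
            print("Error with writing results.", err)
            clear_resources()
            raise
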
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py
index 9ddaf0a..bee2e3f 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_prepare.py
@@ -106,7 +106,7 @@ if __name__ == "__main__":
                 ssn_conf['vpc_cidr'], ssn_conf['region'], ssn_conf['tag_name'], ssn_conf['service_base_name'],
                 ssn_conf['vpc_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -132,7 +132,7 @@ if __name__ == "__main__":
                      "--vpc_name {}".format(ssn_conf['vpc2_cidr'], ssn_conf['region'], ssn_conf['tag2_name'],
                                             ssn_conf['service_base_name'], ssn_conf['vpc2_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -160,7 +160,7 @@ if __name__ == "__main__":
                       os.environ['aws_vpc_id'], 'ssn', ssn_conf['tag_name'],ssn_conf['service_base_name'], '20',
                       True, ssn_conf['zone_full'], ssn_conf['subnet_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -275,7 +275,7 @@ if __name__ == "__main__":
                        json.dumps(ssn_conf['ingress_sg_rules_template']), json.dumps(egress_sg_rules_template),
                        ssn_conf['service_base_name'], ssn_conf['tag_name'], False, True)
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -308,7 +308,7 @@ if __name__ == "__main__":
                    ssn_conf['policy_path'], os.environ['aws_region'], ssn_conf['tag_name'],
                    ssn_conf['service_base_name'], ssn_conf['user_tag'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_role_policy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_role_policy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -339,7 +339,7 @@ if __name__ == "__main__":
         params = "--vpc_id {} --region {} --infra_tag_name {} --infra_tag_value {}".format(
             os.environ['aws_vpc_id'], os.environ['aws_region'], ssn_conf['tag_name'], ssn_conf['service_base_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('ssn_create_endpoint', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('ssn_create_endpoint', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -373,7 +373,7 @@ if __name__ == "__main__":
                 os.environ['aws_vpc2_id'], os.environ['aws_region'], ssn_conf['tag2_name'],
                 ssn_conf['service_base_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('ssn_create_endpoint', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('ssn_create_endpoint', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -410,7 +410,7 @@ if __name__ == "__main__":
                    ssn_conf['role_profile_name'], ssn_conf['tag_name'], ssn_conf['instance_name'], 'ssn', '20')
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -450,7 +450,7 @@ if __name__ == "__main__":
             params = "--elastic_ip {} --ssn_id {} --infra_tag_name {} --infra_tag_value {}".format(
                 ssn_conf['elastic_ip'], ssn_conf['ssn_id'], ssn_conf['tag_name'], ssn_conf['elastic_ip_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('ssn_associate_elastic_ip', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('ssn_associate_elastic_ip', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate.py
index 23f6bb4..1dd2ffe 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate.py
@@ -58,7 +58,7 @@ if __name__ == "__main__":
                  format(ssn_conf['tag_name'], ssn_conf['edge_sg'], ssn_conf['nb_sg'], ssn_conf['de_sg'],
                         ssn_conf['service_base_name'], ssn_conf['de-service_sg'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('ssn_terminate_aws_resources', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('ssn_terminate_aws_resources', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
index d042294..d4c0197 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
@@ -120,7 +120,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -138,7 +138,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -155,7 +155,7 @@ if __name__ == "__main__":
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
                    edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -178,7 +178,7 @@ if __name__ == "__main__":
                     os.environ['notebook_r_mirror'], notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_tensor-rstudio_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_tensor-rstudio_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -195,7 +195,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -210,7 +210,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -227,7 +227,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -247,7 +247,7 @@ if __name__ == "__main__":
             .format(edge_instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], 'rstudio',
                     notebook_config['exploratory_name'], json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py
index 105743c..c52ce58 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py
@@ -119,7 +119,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -137,7 +137,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -154,7 +154,7 @@ if __name__ == "__main__":
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
                    edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -174,7 +174,7 @@ if __name__ == "__main__":
                          os.environ['aws_region'], notebook_config['datalab_ssh_user'],
                          notebook_config['ip_address'], notebook_config['exploratory_name'], edge_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_tensor_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_tensor_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -191,7 +191,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -206,7 +206,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -223,7 +223,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -243,7 +243,7 @@ if __name__ == "__main__":
             .format(edge_instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], 'jupyter',
                     notebook_config['exploratory_name'], json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
index 794e53e..ec5d080 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
@@ -125,7 +125,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -143,7 +143,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -160,7 +160,7 @@ if __name__ == "__main__":
             .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
                     edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -198,7 +198,7 @@ if __name__ == "__main__":
                     os.environ['notebook_r_mirror'], notebook_config['endpoint_url'], notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -215,7 +215,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -230,7 +230,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -247,7 +247,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['tag_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -267,7 +267,7 @@ if __name__ == "__main__":
             .format(edge_instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], 'zeppelin',
                     notebook_config['exploratory_name'], json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_dataengine-service_create_configs.py
index c24f008..cdd1542 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_dataengine-service_create_configs.py
@@ -64,20 +64,20 @@ else:
 
 
 def install_remote_livy(args):
-    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/', shell=True)
-    subprocess.run('sudo service zeppelin-notebook stop', shell=True)
+    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/', shell=True, check=True)
+    subprocess.run('sudo service zeppelin-notebook stop', shell=True, check=True)
     subprocess.run('sudo -i wget http://archive.cloudera.com/beta/livy/livy-server-' + args.livy_version + '.zip -O /opt/'
-          + args.emr_version + '/' + args.cluster_name + '/livy-server-' + args.livy_version + '.zip', shell=True)
+          + args.emr_version + '/' + args.cluster_name + '/livy-server-' + args.livy_version + '.zip', shell=True, check=True)
     subprocess.run('sudo unzip /opt/'
           + args.emr_version + '/' + args.cluster_name + '/livy-server-' + args.livy_version + '.zip -d /opt/'
-          + args.emr_version + '/' + args.cluster_name + '/', shell=True)
+          + args.emr_version + '/' + args.cluster_name + '/', shell=True, check=True)
     subprocess.run('sudo mv /opt/' + args.emr_version + '/' + args.cluster_name + '/livy-server-' + args.livy_version +
-          '/ /opt/' + args.emr_version + '/' + args.cluster_name + '/livy/', shell=True)
+          '/ /opt/' + args.emr_version + '/' + args.cluster_name + '/livy/', shell=True, check=True)
     livy_path = '/opt/' + args.emr_version + '/' + args.cluster_name + '/livy/'
-    subprocess.run('sudo mkdir -p ' + livy_path + '/logs', shell=True)
-    subprocess.run('sudo mkdir -p /var/run/livy', shell=True)
-    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /var/run/livy', shell=True)
-    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R ' + livy_path, shell=True)
+    subprocess.run('sudo mkdir -p ' + livy_path + '/logs', shell=True, check=True)
+    subprocess.run('sudo mkdir -p /var/run/livy', shell=True, check=True)
+    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /var/run/livy', shell=True, check=True)
+    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R ' + livy_path, shell=True, check=True)
 
 
 if __name__ == "__main__":
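Note: install_remote_livy() above has no try/except of its own, so with check=True the first failing step (say, the wget of the Livy archive) now raises out of the function before the later unzip/mv/chown steps run against a missing file. A tiny wrapper like the following (illustrative, not in the patch) would additionally name the failing command in the log:

    import subprocess

    def run_checked(cmd):
        # Echo each command before running it so the step that raised
        # CalledProcessError is obvious in the provisioning log.
        print('+ ' + cmd)
        subprocess.run(cmd, shell=True, check=True)

    # e.g. run_checked('sudo mkdir -p /var/run/livy')
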
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py
index c23bd08..294d7a8 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py
@@ -86,7 +86,7 @@ if __name__ == "__main__":
                                                     json.dumps(image_conf['tags']))
             print("Image was successfully created.")
             try:
-                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True)
+                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
                     if AzureMeta.get_instance_status(image_conf['resource_group_name'],
@@ -102,7 +102,7 @@ if __name__ == "__main__":
                 params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
                     .format(instance_hostname, image_conf['instance_name'], keyfile_name,
                             json.dumps(additional_config), image_conf['datalab_ssh_user'])
-                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
                 print("Image was successfully created. It's name is {}".format(image_conf['full_image_name']))
             except Exception as err:
                 AzureActions.remove_instance(image_conf['resource_group_name'], image_conf['instance_name'])
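Note: the check=True on common_prepare_notebook above matters for the "while not instance_running" loop that follows it, which would otherwise poll indefinitely for an instance whose creation script had already failed. A hedged sketch of the same polling idiom with an explicit timeout (get_status stands in for AzureMeta.get_instance_status; the patched loop itself has no timeout):

    import time

    def wait_running(get_status, timeout=600, interval=15):
        # Poll until the instance reports 'running' or the deadline passes.
        deadline = time.time() + timeout
        while time.time() < deadline:
            if get_status() == 'running':
                return True
            time.sleep(interval)
        raise TimeoutError('instance did not reach running state')
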
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
index e5f0afb..0e4f206 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
@@ -106,7 +106,7 @@ if __name__ == "__main__":
                    notebook_config['spark_master_url'], notebook_config['key_path'], notebook_config['notebook_ip'],
                    os.environ['azure_datalake_enable'], notebook_config['spark_master_ip'])
         try:
-            subprocess.run("~/scripts/{}_{}.py {}".format(os.environ['application'], 'install_dataengine_kernels', params), shell=True)
+            subprocess.run("~/scripts/{}_{}.py {}".format(os.environ['application'], 'install_dataengine_kernels', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -127,7 +127,7 @@ if __name__ == "__main__":
                     notebook_config['datalab_ssh_user'],
                     notebook_config['cluster_name'])
         try:
-            subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params), shell=True)
+            subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
index 0a174c1..b7dbea5 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
@@ -169,7 +169,7 @@ if __name__ == "__main__":
                    notebook_config['instance_storage_account_type'], notebook_config['image_name'],
                    notebook_config['image_type'], json.dumps(notebook_config['tags']))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py b/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py
index 3141cb3..73dff2f 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py
@@ -45,7 +45,7 @@ if __name__ == "__main__":
         params = "--user {} --hostname {} --keyfile '{}' --additional_config '{}'".format(
             args.os_user, ip, args.keyfile, args.additional_config)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except Exception as err:
             print('Error: {0}'.format(err))
             sys.exit(1)
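Note: check=True only changes exit-status handling; it does not make the interpolated params any safer under shell=True, which is why values such as the keyfile above stay manually quoted ('{}'). An illustrative way to quote every value systematically (sketch only; run_script and its keyword parameters are assumptions, not DataLab API):

    import shlex
    import subprocess

    def run_script(script, **kwargs):
        # shlex.quote() protects each value (spaces, quotes) when the
        # command line is handed to a shell; check=True stays orthogonal.
        params = ' '.join('--{} {}'.format(key, shlex.quote(str(value)))
                          for key, value in kwargs.items())
        subprocess.run('~/scripts/{}.py {}'.format(script, params),
                       shell=True, check=True)
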
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
index 7f7de84..6fab6e5 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
@@ -70,7 +70,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -107,7 +107,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
index 444b206..6d7dde5 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
@@ -45,7 +45,7 @@ def configure_slave(slave_number, data_engine):
              data_engine['datalab_ssh_user'], sudo_group)
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -63,7 +63,7 @@ def configure_slave(slave_number, data_engine):
             slave_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem",
             json.dumps(additional_config), data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -78,7 +78,7 @@ def configure_slave(slave_number, data_engine):
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
             .format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_clean_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_clean_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -95,7 +95,7 @@ def configure_slave(slave_number, data_engine):
             .format(slave_hostname, slave_name, keyfile_name, json.dumps(additional_config),
                     data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -111,7 +111,7 @@ def configure_slave(slave_number, data_engine):
             format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -130,7 +130,7 @@ def configure_slave(slave_number, data_engine):
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
                    'slave')
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -236,7 +236,7 @@ if __name__ == "__main__":
              data_engine['datalab_ssh_user'], sudo_group)
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -254,7 +254,7 @@ if __name__ == "__main__":
             master_node_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem", json.dumps(
                 additional_config), data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -270,7 +270,7 @@ if __name__ == "__main__":
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
             .format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_clean_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_clean_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -287,7 +287,7 @@ if __name__ == "__main__":
             .format(master_node_hostname, data_engine['master_node_name'], keyfile_name, json.dumps(additional_config),
                     data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -303,7 +303,7 @@ if __name__ == "__main__":
             format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -322,7 +322,7 @@ if __name__ == "__main__":
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
                    'master')
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -374,7 +374,7 @@ if __name__ == "__main__":
                     data_engine['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
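Note: configure_slave() above runs the same create_ssh_user -> install_user_key -> common_clean_instance -> ... sequence per slave node, so with check=True one failed step on any slave now aborts cluster configuration instead of leaving a half-configured node in the pool. A sketch of the per-slave invocation (instance_count and the loop bounds are assumptions for illustration):

    def configure_cluster(data_engine, instance_count):
        # configure_slave (defined above) raises on its first failed
        # step, so the loop stops at the first bad slave.
        for slave_number in range(1, instance_count):
            configure_slave(slave_number, data_engine)
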
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py
index 769dba3..4727f1d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py
@@ -178,7 +178,7 @@ if __name__ == "__main__":
                    data_engine['project_name'], data_engine['instance_storage_account_type'],
                    data_engine['image_name'], data_engine['image_type'], json.dumps(data_engine['master_tags']))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -212,7 +212,7 @@ if __name__ == "__main__":
                        data_engine['project_name'], data_engine['instance_storage_account_type'],
                        data_engine['image_name'], data_engine['image_type'], json.dumps(data_engine['slave_tags']))
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
index 3d93cf8..2f100fd 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
@@ -100,7 +100,7 @@ if __name__ == "__main__":
             .format(os.environ['conf_os_user'], data_engine['notebook_ip'], data_engine['keyfile'],
                     data_engine['computational_ip'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True, check=True)
         except Exception as err:
             traceback.print_exc()
             datalab.fab.append_result("Failed to update last activity time.", str(err))
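Note: the handler above captures err and passes str(err) to append_result, so with check=True the recorded message now carries the failing command and its exit status from CalledProcessError. A minimal illustration of what that string looks like:

    import subprocess

    try:
        subprocess.run("exit 2", shell=True, check=True)
    except Exception as err:
        # -> "Command 'exit 2' returned non-zero exit status 2."
        # This is the str(err) that append_result() receives above.
        print(str(err))
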
diff --git a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
index c1d8016..4a1cb0c 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
@@ -141,7 +141,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -159,7 +159,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -176,7 +176,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -193,7 +193,7 @@ if __name__ == "__main__":
             instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
             edge_instance_private_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -216,7 +216,7 @@ if __name__ == "__main__":
                     os.environ['azure_region'], os.environ['notebook_r_mirror'],
                     notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True, check=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                    os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem")
         except:
@@ -233,7 +233,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -250,7 +250,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['resource_group_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -273,7 +273,7 @@ if __name__ == "__main__":
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
                 print("Image was successfully created.")
-                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True)
+                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
                     if AzureMeta.get_instance_status(notebook_config['resource_group_name'],
@@ -289,7 +289,7 @@ if __name__ == "__main__":
                 params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
                     .format(instance_hostname, notebook_config['instance_name'], keyfile_name,
                             json.dumps(additional_config), notebook_config['datalab_ssh_user'])
-                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
@@ -315,7 +315,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
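
Note that the bare "except:" blocks above print the traceback and then
raise a fresh, argument-less Exception, which discards the original
CalledProcessError. A possible refinement (not part of this patch) is
explicit exception chaining; the script name below is a placeholder:

    import subprocess
    import traceback

    try:
        subprocess.run("~/scripts/some_step.py --flag value", shell=True,
                       check=True)
    except subprocess.CalledProcessError as err:
        traceback.print_exc()
        raise Exception("some_step failed") from err  # keeps the cause
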
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py b/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py
index 1329488..b7ca24e 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py
@@ -157,7 +157,7 @@ if __name__ == "__main__":
             edge_conf['initial_user'], edge_conf['datalab_ssh_user'], edge_conf['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -173,7 +173,7 @@ if __name__ == "__main__":
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], edge_conf['datalab_ssh_user'],
             os.environ['azure_region'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -198,7 +198,7 @@ if __name__ == "__main__":
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
             edge_conf['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_http_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_http_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -217,7 +217,7 @@ if __name__ == "__main__":
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], json.dumps(additional_config),
             edge_conf['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -237,7 +237,7 @@ if __name__ == "__main__":
             edge_conf['keycloak_client_secret'], edge_conf['step_cert_sans'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_nginx_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_nginx_reverse_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -254,7 +254,7 @@ if __name__ == "__main__":
                            edge_conf['keycloak_client_secret'], edge_conf['instance_hostname'], edge_conf['project_name'],
                            edge_conf['endpoint_name'], edge_conf['edge_hostname'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_keycloak', keycloak_params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_keycloak', keycloak_params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
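
Every call site touched in these files rebuilds the same command string.
If the pattern keeps growing, a small wrapper would centralize both the
string building and the exit-code check. The helper below is purely
illustrative, not something this patch introduces:

    import subprocess

    def run_script(name, params=""):
        # Run "~/scripts/<name>.py <params>" exactly as the call sites
        # above do, raising CalledProcessError on a non-zero exit.
        subprocess.run("~/scripts/{}.py {}".format(name, params),
                       shell=True, check=True)

    # e.g. run_script('configure_keycloak', keycloak_params)
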
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
index b42dcb2..528c8ab 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
@@ -112,7 +112,7 @@ if __name__ == "__main__":
             format(edge_conf['resource_group_name'], edge_conf['vpc_name'], edge_conf['region'], edge_conf['vpc_cidr'],
                    edge_conf['private_subnet_name'], edge_conf['private_subnet_prefix'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -392,7 +392,7 @@ if __name__ == "__main__":
             format(edge_conf['resource_group_name'], edge_conf['edge_security_group_name'], edge_conf['region'],
                    json.dumps(edge_conf['instance_tags']), json.dumps(edge_list_rules))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except Exception as err:
             AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                          edge_conf['private_subnet_name'])
@@ -495,7 +495,7 @@ if __name__ == "__main__":
             format(edge_conf['resource_group_name'], edge_conf['notebook_security_group_name'], edge_conf['region'],
                    json.dumps(edge_conf['instance_tags']), json.dumps(notebook_list_rules))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -601,7 +601,7 @@ if __name__ == "__main__":
             edge_conf['resource_group_name'], edge_conf['master_security_group_name'], edge_conf['region'],
             json.dumps(edge_conf['instance_tags']), json.dumps(cluster_list_rules))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -627,7 +627,7 @@ if __name__ == "__main__":
             edge_conf['resource_group_name'], edge_conf['slave_security_group_name'], edge_conf['region'],
             json.dumps(edge_conf['instance_tags']), json.dumps(cluster_list_rules))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -656,7 +656,7 @@ if __name__ == "__main__":
             format(edge_conf['edge_container_name'], json.dumps(edge_conf['storage_account_tags']),
                    edge_conf['resource_group_name'], edge_conf['region'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_storage_account', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_storage_account', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -685,7 +685,7 @@ if __name__ == "__main__":
                        edge_conf['datalake_user_directory_name'], edge_conf['azure_ad_user_name'],
                        edge_conf['service_base_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_datalake_directory', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_datalake_directory', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -733,7 +733,7 @@ if __name__ == "__main__":
                    edge_conf['primary_disk_size'], 'edge', edge_conf['user_name'], edge_conf['instance_storage_account_type'],
                    edge_conf['image_name'], json.dumps(edge_conf['instance_tags']))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
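
A side note on these parameter strings: edge_prepare.py wraps
json.dumps() output in single quotes by hand, which breaks as soon as a
tag value itself contains a single quote. shlex.quote is the
general-purpose way to pass arbitrary strings through shell=True (an
aside, not a change this patch makes):

    import json
    import shlex
    import subprocess

    tags = {"product": "datalab", "note": "it's quoted safely"}
    params = "--instance_tags {}".format(shlex.quote(json.dumps(tags)))
    subprocess.run("echo {}".format(params), shell=True, check=True)
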
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_status.py b/infrastructure-provisioning/src/general/scripts/azure/edge_status.py
index 23ab6c2..6f41654 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_status.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_status.py
@@ -49,7 +49,7 @@ if __name__ == "__main__":
         params = '--resource_group_name {} --list_resources "{}"'.format(edge_conf['resource_group_name'],
                                                                          os.environ['edge_list_resources'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_collect_data', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_collect_data', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
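
check=True validates only the exit status. For a step like
common_collect_data, whose job is to report information back, the exit
check can be combined with output capture (capture_output requires
Python 3.7+):

    import subprocess

    result = subprocess.run("echo '{\"status\": \"running\"}'",
                            shell=True, check=True,
                            capture_output=True, text=True)
    print(result.stdout.strip())  # the child's stdout, decoded as text
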
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
index 0028e39..b392790 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
@@ -140,7 +140,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -158,7 +158,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -175,7 +175,7 @@ if __name__ == "__main__":
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -199,7 +199,7 @@ if __name__ == "__main__":
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
                    notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True, check=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                    os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem")
         except:
@@ -218,7 +218,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -233,8 +233,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            # subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            # subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -251,7 +251,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['resource_group_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -275,7 +275,7 @@ if __name__ == "__main__":
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
                 print("Image was successfully created.")
-                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True)
+                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
                     if AzureMeta.get_instance_status(notebook_config['resource_group_name'],
@@ -291,7 +291,7 @@ if __name__ == "__main__":
                 params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
                     .format(instance_hostname, notebook_config['instance_name'], keyfile_name,
                             json.dumps(additional_config), notebook_config['datalab_ssh_user'])
-                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
@@ -317,7 +317,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
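
The image-creation branch above polls the instance state in an unbounded
"while not instance_running" loop, so a stuck instance hangs the script
forever. A bounded variant is sketched below; the timeout and poll
interval are arbitrary illustrative values:

    import time

    def wait_until_running(get_status, timeout=900, interval=15):
        # Poll get_status() until it returns 'running' or time runs out.
        deadline = time.time() + timeout
        while time.time() < deadline:
            if get_status() == 'running':
                return
            time.sleep(interval)
        raise TimeoutError("instance did not reach 'running' in time")
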
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
index 74c573b..8c694e1 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
@@ -140,7 +140,7 @@ if __name__ == "__main__":
              notebook_config['datalab_ssh_user'], sudo_group)
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -158,7 +158,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -175,7 +175,7 @@ if __name__ == "__main__":
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -199,7 +199,7 @@ if __name__ == "__main__":
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
                    notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_jupyterlab_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_jupyterlab_node', params), shell=True, check=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                    os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem")
         except:
@@ -217,7 +217,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -232,8 +232,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            # subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            # subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -256,7 +256,7 @@ if __name__ == "__main__":
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
                 print("Image was successfully created.")
-                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True)
+                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
                     if AzureMeta.get_instance_status(notebook_config['resource_group_name'],
@@ -272,7 +272,7 @@ if __name__ == "__main__":
                 params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
                     .format(instance_hostname, notebook_config['instance_name'], keyfile_name,
                             json.dumps(additional_config), notebook_config['datalab_ssh_user'])
-                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except Exception as err:
             datalab.fab.append_result("Failed creating image from notebook.", str(err))
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
@@ -298,7 +298,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
@@ -317,7 +317,7 @@ if __name__ == "__main__":
                    keyfile_name,
                    notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/configure_proxy_for_docker.py {}".format(params), shell=True)
+            subprocess.run("~/scripts/configure_proxy_for_docker.py {}".format(params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -337,7 +337,7 @@ if __name__ == "__main__":
                    keyfile_name,
                    notebook_config['datalab_ssh_user'])
         try:
-           subprocess.run("~/scripts/jupyterlab_container_start.py {}".format(params), shell=True)
+            subprocess.run("~/scripts/jupyterlab_container_start.py {}".format(params), shell=True, check=True)
         except:
              traceback.print_exc()
              raise Exception
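
shell=True is load-bearing in all of these calls: the leading "~" in
"~/scripts/..." is expanded by the shell, and the scripts are executed
via their shebang lines. Where no shell feature is needed, the
argument-list form sidesteps quoting entirely; paths below are
placeholders:

    import os
    import subprocess

    script = os.path.expanduser("~/scripts/jupyterlab_container_start.py")
    subprocess.run(["python3", script, "--keyfile", "/path/to/key.pem"],
                   check=True)  # no shell, so params need no quoting
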
diff --git a/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
index b7e67ae..0fee636 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
@@ -132,7 +132,7 @@ if __name__ == "__main__":
             project_conf['user_key'] = os.environ['key']
             try:
                 subprocess.run('echo "{0}" >> {1}{2}.pub'.format(project_conf['user_key'], os.environ['conf_key_dir'],
-                                                        project_conf['project_name']), shell=True)
+                                                        project_conf['project_name']), shell=True, check=True)
             except:
                 print("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
         except KeyError:
@@ -153,7 +153,7 @@ if __name__ == "__main__":
             format(project_conf['resource_group_name'], project_conf['vpc_name'], project_conf['region'],
                    project_conf['vpc_cidr'], project_conf['private_subnet_name'], project_conf['private_subnet_prefix'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -207,7 +207,7 @@ if __name__ == "__main__":
                 format(project_conf['resource_group_name'], os.environ['azure_edge_security_group_name'],
                        project_conf['region'], json.dumps({"product": "datalab"}), json.dumps(edge_list_rules))
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
             except Exception as err:
                 AzureActions.remove_subnet(project_conf['resource_group_name'], project_conf['vpc_name'],
                                            project_conf['private_subnet_name'])
@@ -484,7 +484,7 @@ if __name__ == "__main__":
                 format(project_conf['resource_group_name'], project_conf['edge_security_group_name'],
                        project_conf['region'], json.dumps(project_conf['instance_tags']), json.dumps(edge_list_rules))
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
             except Exception as err:
                 AzureActions.remove_subnet(project_conf['resource_group_name'], project_conf['vpc_name'],
                                            project_conf['private_subnet_name'])
@@ -590,7 +590,7 @@ if __name__ == "__main__":
             format(project_conf['resource_group_name'], project_conf['notebook_security_group_name'],
                    project_conf['region'], json.dumps(project_conf['instance_tags']), json.dumps(notebook_list_rules))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -698,7 +698,7 @@ if __name__ == "__main__":
             project_conf['resource_group_name'], project_conf['master_security_group_name'], project_conf['region'],
             json.dumps(project_conf['instance_tags']), json.dumps(cluster_list_rules))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -725,7 +725,7 @@ if __name__ == "__main__":
             project_conf['resource_group_name'], project_conf['slave_security_group_name'], project_conf['region'],
             json.dumps(project_conf['instance_tags']), json.dumps(cluster_list_rules))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -753,7 +753,7 @@ if __name__ == "__main__":
         params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}". \
             format(project_conf['shared_container_name'], json.dumps(project_conf['shared_storage_account_tags']),
                    project_conf['resource_group_name'], project_conf['region'])
-        subprocess.run("~/scripts/{}.py {}".format('common_create_storage_account', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('common_create_storage_account', params), shell=True, check=True)
     except Exception as err:
         datalab.fab.append_result("Failed to create storage account.", str(err))
         AzureActions.remove_subnet(project_conf['resource_group_name'], project_conf['vpc_name'],
@@ -780,7 +780,7 @@ if __name__ == "__main__":
             format(project_conf['edge_container_name'], json.dumps(project_conf['storage_account_tags']),
                    project_conf['resource_group_name'], project_conf['region'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_storage_account', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_storage_account', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -815,7 +815,7 @@ if __name__ == "__main__":
                                                      project_conf['azure_ad_user_name'],
                                                      project_conf['service_base_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_datalake_directory', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_datalake_directory', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -871,7 +871,7 @@ if __name__ == "__main__":
                    project_conf['instance_storage_account_type'],
                    project_conf['image_name'], json.dumps(project_conf['instance_tags']))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
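
project_prepare.py is the one place in this patch where shell=True does
extra work: the admin key is appended with a shell redirection
("echo ... >> file"). A pure-Python equivalent, with illustrative key
material and path:

    import os
    import tempfile

    key_material = "ssh-rsa AAAAB3placeholder admin@example"
    pub_path = os.path.join(tempfile.gettempdir(), "project.pub")
    with open(pub_path, "a") as pub_file:  # "a" appends, like >> in sh
        pub_file.write(key_material + "\n")
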
diff --git a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
index d472c0a..7bc02e6 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
@@ -141,7 +141,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -159,7 +159,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -176,7 +176,7 @@ if __name__ == "__main__":
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -199,7 +199,7 @@ if __name__ == "__main__":
                     os.environ['notebook_r_mirror'], notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True, check=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                    os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem")
         except:
@@ -218,7 +218,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -233,7 +233,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -250,7 +250,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['resource_group_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -274,7 +274,7 @@ if __name__ == "__main__":
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
                 print("Image was successfully created.")
-                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True)
+                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
                     if AzureMeta.get_instance_status(notebook_config['resource_group_name'],
@@ -290,7 +290,7 @@ if __name__ == "__main__":
                 params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
                     .format(instance_hostname, notebook_config['instance_name'], keyfile_name,
                             json.dumps(additional_config), notebook_config['datalab_ssh_user'])
-                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
@@ -316,7 +316,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
index 56cc897..3809db5 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
@@ -139,7 +139,7 @@ if __name__ == "__main__":
             (ssn_conf['instance_host'], ssn_conf['ssh_key_path'], ssn_conf['initial_user'],
              ssn_conf['datalab_ssh_user'],
              ssn_conf['sudo_group'])
-        subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         clear_resources()
@@ -154,7 +154,7 @@ if __name__ == "__main__":
                                                                        ssn_conf['ssh_key_path'],
                                                                        ssn_conf['datalab_ssh_user'],
                                                                        ssn_conf['region'])
-        subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         clear_resources()
@@ -173,7 +173,7 @@ if __name__ == "__main__":
             format(ssn_conf['instance_host'], ssn_conf['ssh_key_path'], json.dumps(additional_config),
                    ssn_conf['datalab_ssh_user'], os.environ['ssn_datalab_path'], ssn_conf['service_base_name'],
                    ssn_conf['step_cert_sans'])
-        subprocess.run("~/scripts/{}.py {}".format('configure_ssn_node', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('configure_ssn_node', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         clear_resources()
@@ -198,7 +198,7 @@ if __name__ == "__main__":
                                                           json.dumps(additional_config), os.environ['conf_os_family'],
                                                           ssn_conf['datalab_ssh_user'], os.environ['ssn_datalab_path'],
                                                           os.environ['conf_cloud_provider'], ssn_conf['region'])
-        subprocess.run("~/scripts/{}.py {}".format('configure_docker', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('configure_docker', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         clear_resources()
@@ -509,7 +509,7 @@ if __name__ == "__main__":
                    ssn_conf['default_endpoint_name'],
                    os.environ['keycloak_client_name'], os.environ['keycloak_client_secret'],
                    os.environ['keycloak_auth_server_url'])
-        subprocess.run("~/scripts/{}.py {}".format('configure_ui', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('configure_ui', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         clear_resources()
@@ -584,7 +584,7 @@ if __name__ == "__main__":
         print('Upload response file')
         params = "--instance_name {} --local_log_filepath {} --os_user {} --instance_hostname {}". \
             format(ssn_conf['instance_name'], local_log_filepath, ssn_conf['datalab_ssh_user'], ssn_conf['instnace_ip'])
-        subprocess.run("~/scripts/{}.py {}".format('upload_response_file', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('upload_response_file', params), shell=True, check=True)
     except Exception as err:
         datalab.fab.append_result("Error with writing results.", str(err))
         sys.exit(1)
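
ssn_configure.py repeats the same failure handling after each step:
print the traceback, tear down what was created, record the error, and
exit. With check=True that path is finally reachable when a child script
fails. One way to express the shared flow (illustrative only):

    import subprocess
    import sys
    import traceback

    def run_step(cmd, cleanup, message):
        try:
            subprocess.run(cmd, shell=True, check=True)
        except Exception as err:
            traceback.print_exc()
            cleanup()                  # stand-in for clear_resources()
            print(message, str(err))   # stand-in for append_result()
            sys.exit(1)
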
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
index d78fb48..9e9c360 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
@@ -111,7 +111,7 @@ if __name__ == "__main__":
             logging.info('[CREATING RESOURCE GROUP]')
             print("[CREATING RESOURCE GROUP]")
             params = "--resource_group_name {} --region {}".format(ssn_conf['resource_group_name'], ssn_conf['region'])
-            subprocess.run("~/scripts/{}.py {}".format('ssn_create_resource_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('ssn_create_resource_group', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         datalab.fab.append_result("Failed to create Resource Group.", str(err))
@@ -126,7 +126,7 @@ if __name__ == "__main__":
             print("[CREATING VIRTUAL NETWORK]")
             params = "--resource_group_name {} --vpc_name {} --region {} --vpc_cidr {}".format(
                 ssn_conf['resource_group_name'], ssn_conf['vpc_name'], ssn_conf['region'], ssn_conf['vpc_cidr'])
-            subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         datalab.fab.append_result("Failed to create VPC.", str(err))
@@ -147,7 +147,7 @@ if __name__ == "__main__":
             params = "--resource_group_name {} --vpc_name {} --region {} --vpc_cidr {} --subnet_name {} --prefix {}".\
                 format(ssn_conf['resource_group_name'], ssn_conf['vpc_name'], ssn_conf['region'],
                        ssn_conf['vpc_cidr'], ssn_conf['subnet_name'], ssn_conf['subnet_prefix'])
-            subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         datalab.fab.append_result("Failed to create Subnet.", str(err))
@@ -169,7 +169,7 @@ if __name__ == "__main__":
                      "--source_virtual_network_name {} --destination_virtual_network_name {}".format(
                       ssn_conf['source_resource_group_name'], ssn_conf['resource_group_name'],
                       os.environ['azure_source_vpc_name'], ssn_conf['vpc_name'])
-            subprocess.run("~/scripts/{}.py {}".format('ssn_create_peering', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('ssn_create_peering', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         try:
@@ -239,7 +239,7 @@ if __name__ == "__main__":
             params = "--resource_group_name {} --security_group_name {} --region {} --tags '{}'  --list_rules '{}'".\
                 format(ssn_conf['resource_group_name'], ssn_conf['security_group_name'], ssn_conf['region'],
                        json.dumps(ssn_conf['instance_tags']), json.dumps(list_rules))
-            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_security_group', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         datalab.fab.append_result("Error creating Security group", str(err))
@@ -264,7 +264,7 @@ if __name__ == "__main__":
                      format(ssn_conf['datalake_store_name'], json.dumps(ssn_conf['datalake_store_tags']),
                             ssn_conf['resource_group_name'], ssn_conf['region'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('ssn_create_datalake', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('ssn_create_datalake', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -276,7 +276,7 @@ if __name__ == "__main__":
                        ssn_conf['datalake_shared_directory_name'], ssn_conf['service_base_name'],
                        os.environ['azure_ad_group_id'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_datalake_directory', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_datalake_directory', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -317,7 +317,7 @@ if __name__ == "__main__":
                    initial_user, ssn_conf['static_public_ip_name'], ssn_conf['public_ssh_key'],
                    ssn_conf['primary_disk_size'], 'ssn', ssn_conf['instance_storage_account_type'],
                    ssn_conf['ssn_image_name'], json.dumps(ssn_conf['instance_tags']))
-        subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         datalab.fab.append_result("Failed to create instance.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
index fc9d12d..70821e8 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
@@ -142,7 +142,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -160,7 +160,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -177,7 +177,7 @@ if __name__ == "__main__":
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -197,7 +197,7 @@ if __name__ == "__main__":
                          os.environ['azure_region'], notebook_config['datalab_ssh_user'],
                          notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_tensor_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_tensor_node', params), shell=True, check=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                    os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem")
         except:
@@ -216,7 +216,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -231,7 +231,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -248,7 +248,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['resource_group_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -272,7 +272,7 @@ if __name__ == "__main__":
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
                 print("Image was successfully created.")
-                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True)
+                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
                     if AzureMeta.get_instance_status(notebook_config['resource_group_name'],
@@ -288,7 +288,7 @@ if __name__ == "__main__":
                 params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
                     .format(instance_hostname, notebook_config['instance_name'], keyfile_name,
                             json.dumps(additional_config), notebook_config['datalab_ssh_user'])
-                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
@@ -314,7 +314,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
index 43ddfec..c505cda 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
@@ -141,7 +141,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -159,7 +159,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -176,7 +176,7 @@ if __name__ == "__main__":
             .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                     edge_instance_private_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -210,7 +210,7 @@ if __name__ == "__main__":
                     os.environ['notebook_multiple_clusters'], os.environ['notebook_r_mirror'], 'null',
                     notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True, check=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                    os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem")
         except:
@@ -229,7 +229,7 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
             instance_hostname, keyfile_name, json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -244,7 +244,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -261,7 +261,7 @@ if __name__ == "__main__":
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                         notebook_config['resource_group_name'], notebook_config['instance_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_remove_remote_kernels', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
@@ -285,7 +285,7 @@ if __name__ == "__main__":
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
                 print("Image was successfully created.")
-                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True)
+                subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
                     if AzureMeta.get_instance_status(notebook_config['resource_group_name'],
@@ -301,7 +301,7 @@ if __name__ == "__main__":
                 params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
                     .format(instance_hostname, notebook_config['instance_name'], keyfile_name,
                             json.dumps(additional_config), notebook_config['datalab_ssh_user'])
-                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
@@ -327,7 +327,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
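
The image-recreation branch above polls AzureMeta.get_instance_status in a
bare while loop. A hedged sketch of the same wait with a delay between checks
and an upper bound (wait_for_running is a hypothetical helper, the 'running'
status string is an assumption, and meta stands in for the AzureMeta client):

    import time

    def wait_for_running(meta, resource_group, instance_name, timeout=600, interval=15):
        # Poll the instance status with a sleep between checks and a hard
        # deadline, instead of spinning until the status flips.
        deadline = time.time() + timeout
        while time.time() < deadline:
            if meta.get_instance_status(resource_group, instance_name) == 'running':
                return
            time.sleep(interval)
        raise TimeoutError("{} did not reach 'running' in {}s".format(instance_name, timeout))
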
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
index a451685..a79a4c4 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
@@ -107,7 +107,7 @@ if __name__ == "__main__":
                     edge_instance_hostname, '3128', os.environ['notebook_scala_version'], os.environ['application'],
                     os.environ['conf_pypi_mirror'])
         try:
-            subprocess.run("~/scripts/{}_{}.py {}".format(application, 'install_dataengine-service_kernels', params), shell=True)
+            subprocess.run("~/scripts/{}_{}.py {}".format(application, 'install_dataengine-service_kernels', params), shell=True, check=True)
             GCPActions.update_dataproc_cluster(notebook_config['cluster_name'], notebook_config['cluster_labels'])
         except:
             traceback.print_exc()
@@ -127,7 +127,7 @@ if __name__ == "__main__":
                     notebook_config['key_path'],
                     os.environ['conf_os_user'])
         try:
-            subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params), shell=True)
+            subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
index 291d282..8342d76 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
@@ -100,7 +100,7 @@ if __name__ == "__main__":
                    notebook_config['spark_master_url'], notebook_config['key_path'],
                    notebook_config['notebook_ip'], notebook_config['spark_master_ip'])
         try:
-            subprocess.run("~/scripts/{}_{}.py {}".format(os.environ['application'], 'install_dataengine_kernels', params), shell=True)
+            subprocess.run("~/scripts/{}_{}.py {}".format(os.environ['application'], 'install_dataengine_kernels', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -121,7 +121,7 @@ if __name__ == "__main__":
                     notebook_config['datalab_ssh_user'],
                     notebook_config['cluster_name'])
         try:
-            subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params), shell=True)
+            subprocess.run("~/scripts/{0}.py {1}".format('common_configure_spark', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
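
The pattern above -- format params, run a script from ~/scripts, print the
traceback, re-raise -- repeats in nearly every hunk of this patch. A hedged
sketch of factoring it into one helper that keeps the child's exit code
attached via exception chaining (run_helper_script is hypothetical, not a
DataLab function):

    import subprocess
    import traceback

    def run_helper_script(name, params=""):
        cmd = "~/scripts/{}.py {}".format(name, params).strip()
        try:
            subprocess.run(cmd, shell=True, check=True)
        except subprocess.CalledProcessError as err:
            traceback.print_exc()
            # 'raise ... from err' preserves the CalledProcessError (and its
            # return code), unlike the bare 'raise Exception' used above.
            raise Exception("{} failed".format(name)) from err
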
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
index 87b0277..273dc23 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
@@ -192,7 +192,7 @@ if __name__ == "__main__":
                    notebook_config['network_tag'], json.dumps(notebook_config['labels']),
                    notebook_config['service_base_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py b/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py
index 6721c0b..0119977 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py
@@ -46,7 +46,7 @@ if __name__ == "__main__":
         params = "--user {} --hostname {} --keyfile '{}' --additional_config '{}'".format(
             args.os_user, ip, args.keyfile, args.additional_config)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except Exception as err:
             print('Error: {0}'.format(err))
             sys.exit(1)
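
Here the failure path exits with a fixed status of 1. Since check=True puts
the child's return code on the CalledProcessError, a variant could forward it
instead (an illustrative sketch; the params values are placeholders):

    import subprocess
    import sys

    params = "--user datalab --hostname 10.0.0.5"  # placeholder values
    try:
        subprocess.run("~/scripts/install_user_key.py {}".format(params), shell=True, check=True)
    except subprocess.CalledProcessError as err:
        print('Error: {0}'.format(err))
        sys.exit(err.returncode)  # forward the child's exit code rather than 1
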
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
index 734cd44..fe45998 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
@@ -69,7 +69,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except Exception as err:
             traceback.print_exc()
             datalab.fab.append_result("Failed to setup git credentials.", str(err))
@@ -83,7 +83,7 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True, check=True)
         except Exception as err:
             traceback.print_exc()
             datalab.fab.append_result("Failed to update last activity time.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
index 21932bb..34d0e8b 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
@@ -46,7 +46,7 @@ def configure_dataengine_service(instance, dataproc_conf):
             .format(dataproc_conf['instance_ip'], dataproc_conf['cluster_name'], dataproc_conf['key_path'],
                     json.dumps(additional_config), dataproc_conf['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -104,7 +104,7 @@ def configure_dataengine_service(instance, dataproc_conf):
                     dataproc_conf['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py
index 9027269..bcc2eb7 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py
@@ -40,7 +40,7 @@ def install_libs(instance, data_engine):
                 data_engine['keyfile'], data_engine['libs'])
     try:
         # Run script to install additional libs
-        subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True, check=True)
     except:
         traceback.print_exc()
         raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py
index d84b3e0..55af8b5 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py
@@ -61,7 +61,7 @@ if __name__ == "__main__":
             .format(data_engine['os_user'], data_engine['master_ip'], data_engine['keyfile'], data_engine['group_name'])
         try:
             # Run script to get available libs
-            subprocess.run("~/scripts/{}.py {}".format('get_list_available_pkgs', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('get_list_available_pkgs', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
index 5eed753..7da3692 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
@@ -132,13 +132,13 @@ if __name__ == "__main__":
 
     try:
         GCPMeta.dataproc_waiter(dataproc_conf['cluster_labels'])
-        subprocess.run('touch /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']), shell=True)
+        subprocess.run('touch /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         datalab.fab.append_result("Dataproc waiter fail.", str(err))
         sys.exit(1)
 
-    subprocess.run("echo Waiting for changes to propagate; sleep 10", shell=True)
+    subprocess.run("echo Waiting for changes to propagate; sleep 10", shell=True, check=True)
 
     dataproc_cluster = json.loads(open('/root/templates/dataengine-service_cluster.json').read())
     print(dataproc_cluster)
@@ -179,14 +179,14 @@ if __name__ == "__main__":
                                                                    json.dumps(dataproc_cluster))
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('dataengine-service_create', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('dataengine-service_create', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
 
         keyfile_name = "/root/keys/{}.pem".format(dataproc_conf['key_name'])
-        subprocess.run('rm /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']), shell=True)
+        subprocess.run('rm /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']), shell=True, check=True)
     except Exception as err:
         datalab.fab.append_result("Failed to create Dataproc Cluster.", str(err))
-        subprocess.run('rm /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']), shell=True)
+        subprocess.run('rm /response/.dataproc_creating_{}'.format(os.environ['exploratory_name']), shell=True, check=True)
         sys.exit(1)
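
The marker-file bookkeeping above shells out for touch, rm, and sleep, and
duplicates the rm in both the success and failure branches. A sketch of the
same flow in plain Python with pathlib and try/finally (the helper itself is
illustrative, not DataLab code):

    import pathlib
    import time

    def with_creation_marker(exploratory_name, create_cluster):
        marker = pathlib.Path('/response/.dataproc_creating_{}'.format(exploratory_name))
        marker.touch()
        try:
            time.sleep(10)  # replaces "echo Waiting ...; sleep 10"
            create_cluster()
        finally:
            # Runs on success and on failure, replacing the duplicated
            # 'rm /response/...' calls in both branches above.
            marker.unlink()
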
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
index 9bbdd5b..8507703 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
@@ -45,7 +45,7 @@ def configure_slave(slave_number, data_engine):
              data_engine['datalab_ssh_user'], sudo_group)
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -63,7 +63,7 @@ def configure_slave(slave_number, data_engine):
             slave_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem", json.dumps(
                 additional_config), data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -80,7 +80,7 @@ def configure_slave(slave_number, data_engine):
             .format(slave_hostname, slave_name, keyfile_name, json.dumps(additional_config),
                     data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -96,7 +96,7 @@ def configure_slave(slave_number, data_engine):
             format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -115,7 +115,7 @@ def configure_slave(slave_number, data_engine):
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
                    'slave')
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -219,7 +219,7 @@ if __name__ == "__main__":
              data_engine['datalab_ssh_user'], sudo_group)
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -237,7 +237,7 @@ if __name__ == "__main__":
             master_node_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem",
             json.dumps(additional_config), data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -254,7 +254,7 @@ if __name__ == "__main__":
             .format(master_node_hostname, data_engine['master_node_name'], keyfile_name, json.dumps(additional_config),
                     data_engine['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -270,7 +270,7 @@ if __name__ == "__main__":
             format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -289,7 +289,7 @@ if __name__ == "__main__":
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
                    'master')
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -340,7 +340,7 @@ if __name__ == "__main__":
                     data_engine['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
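
All of these commands interpolate values into a shell string with str.format,
so a value containing spaces or shell metacharacters would split into extra
arguments or be interpreted by the shell. A sketch of quoting each value with
shlex.quote before interpolation (the values shown are invented examples):

    import shlex
    import subprocess

    hostname = "10.0.0.5"
    keyfile = "/root/keys/project key.pem"  # the space would break unquoted interpolation
    params = "--hostname {} --keyfile {}".format(shlex.quote(hostname), shlex.quote(keyfile))
    subprocess.run("~/scripts/common_configure_proxy.py {}".format(params), shell=True, check=True)
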
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
index 7c6b55d..64a27c0 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
@@ -195,7 +195,7 @@ if __name__ == "__main__":
                    data_engine['network_tag'], data_engine['cluster_name'],
                    json.dumps(data_engine['master_labels']), data_engine['service_base_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -223,7 +223,7 @@ if __name__ == "__main__":
                        data_engine['network_tag'], data_engine['cluster_name'],
                        json.dumps(data_engine['slave_labels']), data_engine['service_base_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
index 27e69dc..7843592 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
@@ -98,7 +98,7 @@ if __name__ == "__main__":
             .format(os.environ['conf_os_user'], data_engine['notebook_ip'], data_engine['keyfile'],
                     data_engine['computational_ip'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('update_inactivity_on_start', params), shell=True, check=True)
         except Exception as err:
             traceback.print_exc()
             datalab.fab.append_result("Failed to update last activity time.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
index 2388c38..cceb651 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
@@ -111,7 +111,7 @@ if __name__ == "__main__":
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -129,7 +129,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
                     json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -146,7 +146,7 @@ if __name__ == "__main__":
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -169,7 +169,7 @@ if __name__ == "__main__":
                     os.environ['gcp_region'], os.environ['notebook_r_mirror'],
                     notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -187,7 +187,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config),
             notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -202,8 +202,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -255,7 +255,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py
index d05d351..9450889 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py
@@ -162,7 +162,7 @@ if __name__ == "__main__":
             edge_conf['initial_user'], edge_conf['datalab_ssh_user'], edge_conf['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -178,7 +178,7 @@ if __name__ == "__main__":
             edge_conf['instance_hostname'], edge_conf['ssh_key_path'], edge_conf['datalab_ssh_user'],
             os.environ['gcp_region'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -203,7 +203,7 @@ if __name__ == "__main__":
                  .format(edge_conf['instance_hostname'], edge_conf['ssh_key_path'], json.dumps(additional_config),
                          edge_conf['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_http_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_http_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -222,7 +222,7 @@ if __name__ == "__main__":
             edge_conf['instance_hostname'], edge_conf['ssh_key_path'], json.dumps(additional_config),
             edge_conf['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -244,7 +244,7 @@ if __name__ == "__main__":
                                                 edge_conf['keycloak_client_secret'], edge_conf['step_cert_sans'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_nginx_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_nginx_reverse_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -262,7 +262,7 @@ if __name__ == "__main__":
                     edge_conf['keycloak_client_secret'], edge_conf['instance_hostname'], edge_conf['project_name'],
                     edge_conf['endpoint_name'], edge_conf['edge_hostname'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_keycloak', keycloak_params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_keycloak', keycloak_params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -281,7 +281,7 @@ if __name__ == "__main__":
                 edge_conf['instance_hostname'], edge_conf['ssh_key_path'], json.dumps(additional_config),
                 edge_conf['datalab_ssh_user'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('configure_nftables', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('configure_nftables', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py
index 4e8eb71..d7b2dbb 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py
@@ -44,7 +44,7 @@ if __name__ == "__main__":
         print('[COLLECTING DATA]')
         params = '--list_resources "{}"'.format(os.environ['edge_list_resources'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_collect_data', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_collect_data', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
index f029bcd..544e95a 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
@@ -111,7 +111,7 @@ if __name__ == "__main__":
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -129,7 +129,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
                     json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -146,7 +146,7 @@ if __name__ == "__main__":
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -171,7 +171,7 @@ if __name__ == "__main__":
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
                    notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -189,7 +189,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config),
             notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -204,8 +204,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -257,7 +257,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py
index 313d4ef..83ab1c7 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_dataengine-service_create_configs.py
@@ -57,7 +57,7 @@ yarn_dir = '/opt/{}/{}/conf/'.format(args.dataproc_version, args.cluster_name)
 
 def r_kernel(args):
     spark_path = '/opt/{}/{}/spark/'.format(args.dataproc_version, args.cluster_name)
-    subprocess.run('mkdir -p {}/r_{}/'.format(kernels_dir, args.cluster_name), shell=True)
+    subprocess.run('mkdir -p {}/r_{}/'.format(kernels_dir, args.cluster_name), shell=True, check=True)
     kernel_path = "{}/r_{}/kernel.json".format(kernels_dir, args.cluster_name)
     template_file = "/tmp/r_dataengine-service_template.json"
 
@@ -76,10 +76,10 @@ def r_kernel(args):
 
 def toree_kernel(args):
     spark_path = '/opt/{0}/{1}/spark/'.format(args.dataproc_version, args.cluster_name)
-    subprocess.run('mkdir -p {0}toree_{1}/'.format(kernels_dir, args.cluster_name), shell=True)
-    subprocess.run('tar zxvf /tmp/toree_kernel.tar.gz -C {0}toree_{1}/'.format(kernels_dir, args.cluster_name), shell=True)
-    subprocess.run('sudo mv {0}toree_{1}/toree-0.3.0-incubating/* {0}toree_{1}/'.format(kernels_dir, args.cluster_name), shell=True)
-    subprocess.run('sudo rm -r {0}toree_{1}/toree-0.3.0-incubating'.format(kernels_dir, args.cluster_name), shell=True)
+    subprocess.run('mkdir -p {0}toree_{1}/'.format(kernels_dir, args.cluster_name), shell=True, check=True)
+    subprocess.run('tar zxvf /tmp/toree_kernel.tar.gz -C {0}toree_{1}/'.format(kernels_dir, args.cluster_name), shell=True, check=True)
+    subprocess.run('sudo mv {0}toree_{1}/toree-0.3.0-incubating/* {0}toree_{1}/'.format(kernels_dir, args.cluster_name), shell=True, check=True)
+    subprocess.run('sudo rm -r {0}toree_{1}/toree-0.3.0-incubating'.format(kernels_dir, args.cluster_name), shell=True, check=True)
     kernel_path = '{0}toree_{1}/kernel.json'.format(kernels_dir, args.cluster_name)
     template_file = "/tmp/toree_dataengine-service_templatev2.json"
     with open(template_file, 'r') as f:
@@ -92,12 +92,12 @@ def toree_kernel(args):
     text = text.replace('SCALA_VERSION', args.scala_version)
     with open(kernel_path, 'w') as f:
         f.write(text)
-    subprocess.run('touch /tmp/kernel_var.json', shell=True)
+    subprocess.run('touch /tmp/kernel_var.json', shell=True, check=True)
     subprocess.run(
         "PYJ=`find /opt/" + args.dataproc_version + "/" + args.cluster_name +
         "/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat " + kernel_path +
-        " | sed 's|PY4J|'$PYJ'|g' > /tmp/kernel_var.json", shell=True)
-    subprocess.run('sudo mv /tmp/kernel_var.json ' + kernel_path, shell=True)
+        " | sed 's|PY4J|'$PYJ'|g' > /tmp/kernel_var.json", shell=True, check=True)
+    subprocess.run('sudo mv /tmp/kernel_var.json ' + kernel_path, shell=True, check=True)
     run_sh_path = kernels_dir + "toree_" + args.cluster_name + "/bin/run.sh"
     template_sh_file = '/tmp/run_template.sh'
     with open(template_sh_file, 'r') as f:
@@ -110,27 +110,27 @@ def toree_kernel(args):
 def install_sparkamagic_kernels(args):
     try:
         subprocess.run('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension', shell=True, check=True)
-        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir), shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir), shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkrkernel --user'.format(sparkmagic_dir), shell=True)
+        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True, check=True)
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir), shell=True, check=True)
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir), shell=True, check=True)
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkrkernel --user'.format(sparkmagic_dir), shell=True, check=True)
         pyspark_kernel_name = 'PySpark (Python-{0} / Spark-{1} ) [{2}]'.format(args.python_version, args.spark_version,
                                                                          args.cluster_name)
         subprocess.run('sed -i \'s|PySpark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/pysparkkernel/kernel.json'.format(
-            pyspark_kernel_name, args.os_user), shell=True)
+            pyspark_kernel_name, args.os_user), shell=True, check=True)
         spark_kernel_name = 'Spark (Scala-{0} / Spark-{1} ) [{2}]'.format(args.scala_version, args.spark_version,
                                                                          args.cluster_name)
         subprocess.run('sed -i \'s|Spark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkkernel/kernel.json'.format(
-            spark_kernel_name, args.os_user), shell=True)
+            spark_kernel_name, args.os_user), shell=True, check=True)
         sparkr_kernel_name = 'SparkR (R-{0} / Spark-{1} ) [{2}]'.format(args.r_version, args.spark_version,
                                                                             args.cluster_name)
         subprocess.run('sed -i \'s|SparkR|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkrkernel/kernel.json'.format(
-            sparkr_kernel_name, args.os_user), shell=True)
-        subprocess.run('mkdir -p /home/' + args.os_user + '/.sparkmagic', shell=True)
-        subprocess.run('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json', shell=True)
+            sparkr_kernel_name, args.os_user), shell=True, check=True)
+        subprocess.run('mkdir -p /home/' + args.os_user + '/.sparkmagic', shell=True, check=True)
+        subprocess.run('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json', shell=True, check=True)
         subprocess.run('sed -i \'s|LIVY_HOST|{0}|g\' /home/{1}/.sparkmagic/config.json'.format(
-                args.master_ip, args.os_user), shell=True)
-        subprocess.run('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user), shell=True)
+                args.master_ip, args.os_user), shell=True, check=True)
+        subprocess.run('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user), shell=True, check=True)
     except:
         sys.exit(1)
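
One caveat in install_sparkamagic_kernels above: subprocess.run(...,
capture_output=True) returns a CompletedProcess object, so formatting
sparkmagic_dir straight into the install commands embeds the object's repr
rather than the path. A sketch of extracting the text first (same pip3
pipeline as above):

    import subprocess

    result = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'",
                            capture_output=True, shell=True, check=True)
    sparkmagic_dir = result.stdout.decode('utf-8').strip()
    subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'
                   .format(sparkmagic_dir), shell=True, check=True)
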
 
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
index f82d719..4207257 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
@@ -111,7 +111,7 @@ if __name__ == "__main__":
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -129,7 +129,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
                     json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -146,7 +146,7 @@ if __name__ == "__main__":
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -170,7 +170,7 @@ if __name__ == "__main__":
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
                    notebook_config['exploratory_name'], )
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_jupyterlab_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_jupyterlab_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -188,7 +188,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config),
             notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -203,8 +203,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -256,7 +256,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
@@ -275,7 +275,7 @@ if __name__ == "__main__":
                    notebook_config['ssh_key_path'],
                    notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/configure_proxy_for_docker.py {}".format(params), shell=True)
+            subprocess.run("~/scripts/configure_proxy_for_docker.py {}".format(params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -295,7 +295,7 @@ if __name__ == "__main__":
                    notebook_config['ssh_key_path'],
                    notebook_config['datalab_ssh_user'])
         try:
-           subprocess.run("~/scripts/jupyterlab_container_start.py {}".format(params), shell=True)
+           subprocess.run("~/scripts/jupyterlab_container_start.py {}".format(params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
index d2455a4..6a1ca24 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
@@ -132,7 +132,7 @@ if __name__ == "__main__":
             project_conf['user_key'] = os.environ['key']
             try:
                 subprocess.run('echo "{0}" >> {1}{2}.pub'.format(project_conf['user_key'], os.environ['conf_key_dir'],
-                                                        project_conf['project_name']), shell=True)
+                                                        project_conf['project_name']), shell=True, check=True)
             except:
                 print("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
         except KeyError:
@@ -154,7 +154,7 @@ if __name__ == "__main__":
                          project_conf['private_subnet_prefix'], project_conf['vpc_cidr'],
                          project_conf['user_subnets_range'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True, check=True)
             project_conf['private_subnet_cidr'] = GCPMeta.get_subnet(project_conf['private_subnet_name'],
                                                                      project_conf['region'])['ipCidrRange']
         except:
@@ -178,7 +178,7 @@ if __name__ == "__main__":
             project_conf['edge_unique_index'], project_conf['service_base_name'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_service_account', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_service_account', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -202,7 +202,7 @@ if __name__ == "__main__":
                   project_conf['ps_roles_path'], project_conf['ps_unique_index'], project_conf['service_base_name'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_service_account', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_service_account', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -303,7 +303,7 @@ if __name__ == "__main__":
 
         params = "--firewall '{}'".format(json.dumps(firewall_rules))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_firewall', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_firewall', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -381,7 +381,7 @@ if __name__ == "__main__":
 
         params = "--firewall '{}'".format(json.dumps(firewall_rules))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_firewall', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_firewall', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -411,7 +411,7 @@ if __name__ == "__main__":
         params = "--bucket_name {} --tags '{}'".format(project_conf['shared_bucket_name'],
                                                        json.dumps(project_conf['shared_bucket_tags']))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_bucket', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_bucket', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -427,7 +427,7 @@ if __name__ == "__main__":
                                                        json.dumps(project_conf['bucket_tags']))
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_bucket', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_bucket', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -478,7 +478,7 @@ if __name__ == "__main__":
         print('[CREATING STATIC IP ADDRESS]')
         params = "--address_name {} --region {}".format(project_conf['static_address_name'], project_conf['region'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('edge_create_static_ip', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('edge_create_static_ip', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -525,7 +525,7 @@ if __name__ == "__main__":
                   'edge', project_conf['static_ip'], project_conf['network_tag'],
                   json.dumps(project_conf['instance_labels']), project_conf['service_base_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -561,7 +561,7 @@ if __name__ == "__main__":
                                                                                                        project_conf['ps_firewall_target'],
                                                                                                        edge_instance)
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_nat_route', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_nat_route', params), shell=True, check=True)
             except:
                 traceback.print_exc()
                 raise Exception
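
Note on the change pattern above: without check=True, subprocess.run() returns
normally whatever the child's exit status, so the surrounding try/except never
fires and provisioning continues past a failed helper script. With check=True,
a nonzero exit status raises subprocess.CalledProcessError, which is what the
existing except blocks are finally able to catch. A minimal, illustrative
sketch (not part of the patch):

    import subprocess

    try:
        # 'false' always exits nonzero; with check=True this raises
        # subprocess.CalledProcessError instead of failing silently.
        subprocess.run("false", shell=True, check=True)
    except subprocess.CalledProcessError as err:
        print("command failed with exit code {}".format(err.returncode))
        raise
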
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
index 110e87d..8b51ca0 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
@@ -114,7 +114,7 @@ if __name__ == "__main__":
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -133,7 +133,7 @@ if __name__ == "__main__":
                     json.dumps(additional_config),
                     notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -150,7 +150,7 @@ if __name__ == "__main__":
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -173,7 +173,7 @@ if __name__ == "__main__":
                     os.environ['notebook_r_mirror'], notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -191,7 +191,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config),
             notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -206,8 +206,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -259,7 +259,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
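
A side effect worth noting for the rstudio_configure.py changes above: the
except blocks call traceback.print_exc() or datalab.fab.append_result() before
re-raising, and with check=True the exception they report now names the failing
command line and its exit code. Illustrative sketch only; the params value is a
hypothetical stand-in:

    import subprocess
    import traceback

    params = "--os_user datalab-user"  # hypothetical arguments
    try:
        subprocess.run("~/scripts/install_user_key.py {}".format(params),
                       shell=True, check=True)
    except subprocess.CalledProcessError:
        # str(CalledProcessError) includes the command and exit status,
        # so the printed traceback pinpoints the failing helper script.
        traceback.print_exc()
        raise
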
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py
index 32ada24..11e5283 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py
@@ -53,28 +53,28 @@ yarn_dir = '/opt/' + args.dataproc_version + '/' + args.cluster_name + '/conf/'
 def configure_rstudio():
     if not os.path.exists('/home/' + args.os_user + '/.ensure_dir/rstudio_dataengine-service_ensured'):
         try:
-            subprocess.run('echo "export R_LIBS_USER=' + spark_dir + '/R/lib:" >> /home/' + args.os_user + '/.bashrc', shell=True)
-            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run('echo \'SPARK_HOME="' + spark_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run('echo \'YARN_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run('echo \'HADOOP_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True)
-            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True)
-            subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine-service_ensured', shell=True)
+            subprocess.run('echo "export R_LIBS_USER=' + spark_dir + '/R/lib:" >> /home/' + args.os_user + '/.bashrc', shell=True, check=True)
+            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run('echo \'SPARK_HOME="' + spark_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run('echo \'YARN_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run('echo \'HADOOP_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True, check=True)
+            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
+            subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine-service_ensured', shell=True, check=True)
         except Exception as err:
             print('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
-            subprocess.run("sed -i '/R_LIBS_USER/ { s|=\(.*\)|=\\1" + spark_dir + "/R/lib:| }' /home/" + args.os_user + "/.bashrc", shell=True)
-            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True)
-            subprocess.run('echo \'SPARK_HOME="' + spark_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run('echo \'YARN_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run('echo \'HADOOP_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True)
+            subprocess.run("sed -i '/R_LIBS_USER/ { s|=\(.*\)|=\\1" + spark_dir + "/R/lib:| }' /home/" + args.os_user + "/.bashrc", shell=True, check=True)
+            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True, check=True)
+            subprocess.run('echo \'SPARK_HOME="' + spark_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run('echo \'YARN_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run('echo \'HADOOP_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
         except Exception as err:
             print('Error:', str(err))
             sys.exit(1)
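
One caveat on the .Renviron/.Rprofile edits above: check=True inspects only the
shell's final exit status, and sed exits 0 even when its pattern matches
nothing. These calls now fail loudly on hard errors (for example a missing
.Renviron) but stay silent when a substitution simply finds no match. A small
sketch, with a hypothetical home directory:

    import subprocess

    # Raises CalledProcessError if the file is missing (sed exits nonzero),
    # but still exits 0 -- silently -- if the pattern never matches.
    subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/datalab-user/.Renviron",
                   shell=True, check=True)
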
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py
index af436d6..12b2be1 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_install_dataengine-service_kernels.py
@@ -54,7 +54,7 @@ def configure_notebook(args):
     conn.sudo('chmod 755 /usr/local/bin/create_configs.py')
     conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
     conn.run('mkdir -p /tmp/datalab_libs/')
-    subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string), shell=True)
+    subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, env.host_string), shell=True, check=True)
     conn.run('chmod a+x /tmp/datalab_libs/*')
     conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
     if exists('/usr/lib64'):
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
index 101ba48..22694ec 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
@@ -149,7 +149,7 @@ if __name__ == "__main__":
             ssn_conf['datalab_ssh_user'], ssn_conf['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
             #if subprocess.returncode != 0:
              #   raise Exception
         except:
@@ -170,7 +170,7 @@ if __name__ == "__main__":
                    ssn_conf['datalab_ssh_user'], ssn_conf['region'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -193,7 +193,7 @@ if __name__ == "__main__":
                    ssn_conf['step_cert_sans'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_ssn_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_ssn_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -225,7 +225,7 @@ if __name__ == "__main__":
                    os.environ['conf_cloud_provider'], ssn_conf['region'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_docker', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_docker', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -518,7 +518,7 @@ if __name__ == "__main__":
                    json.dumps(cloud_params), os.environ['keycloak_client_name'], os.environ['keycloak_client_secret'],
                    os.environ['keycloak_auth_server_url'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_ui', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_ui', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -571,7 +571,7 @@ if __name__ == "__main__":
         params = "--instance_name {} --local_log_filepath {} --os_user {} --instance_hostname {}". \
             format(ssn_conf['instance_name'], local_log_filepath, ssn_conf['datalab_ssh_user'],
                    ssn_conf['instance_hostname'])
-        subprocess.run("~/scripts/{}.py {}".format('upload_response_file', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('upload_response_file', params), shell=True, check=True)
     except Exception as err:
         datalab.fab.append_result("Error with writing results.", str(err))
         clear_resources()
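
The commented-out returncode check left in ssn_configure.py above is exactly
what check=True subsumes (the comment references subprocess.returncode, but the
attribute actually lives on the CompletedProcess returned by subprocess.run).
The two patterns side by side, with a trivial stand-in command:

    import subprocess

    cmd = "true"  # stand-in for "~/scripts/create_ssh_user.py <params>"

    # Old pattern hinted at by the commented-out lines:
    result = subprocess.run(cmd, shell=True)
    if result.returncode != 0:
        raise Exception

    # Equivalent, and what this patch standardizes on:
    subprocess.run(cmd, shell=True, check=True)
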
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py
index 9898020..f8a269c 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py
@@ -98,7 +98,7 @@ if __name__ == "__main__":
             print('[CREATE VPC]')
             params = "--vpc_name {}".format(ssn_conf['vpc_name'])
             try:
-                subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True, check=True)
                 os.environ['gcp_vpc_name'] = ssn_conf['vpc_name']
             except:
                 traceback.print_exc()
@@ -127,7 +127,7 @@ if __name__ == "__main__":
                 format(ssn_conf['subnet_name'], ssn_conf['region'], ssn_conf['vpc_selflink'], ssn_conf['subnet_prefix'],
                        ssn_conf['vpc_cidr'], True)
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params), shell=True, check=True)
                 os.environ['gcp_subnet_name'] = ssn_conf['subnet_name']
             except:
                 traceback.print_exc()
@@ -193,7 +193,7 @@ if __name__ == "__main__":
 
             params = "--firewall '{}'".format(json.dumps(firewall_rules))
             try:
-                subprocess.run("~/scripts/{}.py {}".format('common_create_firewall', params), shell=True)
+                subprocess.run("~/scripts/{}.py {}".format('common_create_firewall', params), shell=True, check=True)
                 os.environ['gcp_firewall_name'] = ssn_conf['firewall_name']
             except:
                 traceback.print_exc()
@@ -214,7 +214,7 @@ if __name__ == "__main__":
                                                   ssn_conf['ssn_policy_path'], ssn_conf['ssn_roles_path'],
                                                   ssn_conf['ssn_unique_index'], ssn_conf['service_base_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_service_account', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_service_account', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -239,7 +239,7 @@ if __name__ == "__main__":
         print('[CREATING STATIC IP ADDRESS]')
         params = "--address_name {} --region {}".format(ssn_conf['static_address_name'], ssn_conf['region'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('ssn_create_static_ip', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('ssn_create_static_ip', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -284,7 +284,7 @@ if __name__ == "__main__":
                    ssn_conf['static_ip'], ssn_conf['network_tag'], json.dumps(ssn_conf['instance_labels']), '20',
                    ssn_conf['service_base_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_create_instance', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py
index 0c5d235..efe84cf 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py
@@ -59,7 +59,7 @@ if __name__ == "__main__":
         params = "--service_base_name {} --region {} --zone {} --pre_defined_vpc {} --vpc_name {}".format(
             ssn_conf['service_base_name'], ssn_conf['region'], ssn_conf['zone'], pre_defined_vpc, ssn_conf['vpc_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('ssn_terminate_gcp_resources', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('ssn_terminate_gcp_resources', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py
index 514da40..0f57a46 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py
@@ -113,7 +113,7 @@ if __name__ == "__main__":
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -131,7 +131,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
                     json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -215,7 +215,7 @@ if __name__ == "__main__":
                    keycloak_client_secret, edge_instance_private_ip,
                    edge_instance_hostname, notebook_config['exploratory_name'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_superset_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_superset_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -233,7 +233,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config),
             notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -248,8 +248,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -301,7 +301,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
@@ -320,7 +320,7 @@ if __name__ == "__main__":
                    notebook_config['ssh_key_path'],
                    notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/configure_proxy_for_docker.py {}".format(params), shell=True)
+            subprocess.run("~/scripts/configure_proxy_for_docker.py {}".format(params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -339,7 +339,7 @@ if __name__ == "__main__":
                    notebook_config['ssh_key_path'],
                    notebook_config['datalab_ssh_user'])
         try:
-           subprocess.run("~/scripts/superset_start.py {}".format(params), shell=True)
+           subprocess.run("~/scripts/superset_start.py {}".format(params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
index 108b55d..56ddd56 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
@@ -118,7 +118,7 @@ if __name__ == "__main__":
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
 
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -136,7 +136,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
                     json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -153,7 +153,7 @@ if __name__ == "__main__":
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -175,7 +175,7 @@ if __name__ == "__main__":
                     os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
                     os.environ['notebook_r_mirror'], notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_tensor-rstudio_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_tensor-rstudio_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -193,7 +193,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config),
             notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -208,8 +208,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -261,7 +261,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py
index 2289951..f26bb69 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py
@@ -111,7 +111,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -129,7 +129,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
                     json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -146,7 +146,7 @@ if __name__ == "__main__":
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -164,7 +164,7 @@ if __name__ == "__main__":
                          os.environ['gcp_region'], notebook_config['datalab_ssh_user'],
                          notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_tensor_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_tensor_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -182,7 +182,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config),
             notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -197,8 +197,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -250,7 +250,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
index 52c798f..c55f2e4 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
@@ -111,7 +111,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -129,7 +129,7 @@ if __name__ == "__main__":
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
                     json.dumps(additional_config), notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -146,7 +146,7 @@ if __name__ == "__main__":
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_prerequisites', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -181,7 +181,7 @@ if __name__ == "__main__":
                     os.environ['notebook_r_mirror'], 'null',
                     notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
-            subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -199,7 +199,7 @@ if __name__ == "__main__":
             instance_hostname, notebook_config['ssh_key_path'], json.dumps(additional_config),
             notebook_config['datalab_ssh_user'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed installing users key")
             raise Exception
@@ -214,8 +214,8 @@ if __name__ == "__main__":
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed setup git credentials")
             raise Exception
@@ -267,7 +267,7 @@ if __name__ == "__main__":
                     notebook_config['exploratory_name'],
                     json.dumps(additional_info))
         try:
-            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_configure_reverse_proxy', params), shell=True, check=True)
         except:
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_dataengine-service_create_configs.py
index 1b39f61..ea33688 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_dataengine-service_create_configs.py
@@ -54,17 +54,17 @@ yarn_dir = '/opt/' + args.dataproc_version + '/' + args.cluster_name + '/conf/'
 
 
 def install_remote_livy(args):
-    subprocess.run('sudo chown {0}:{0} -R /opt/zeppelin/'.format(args.os_user), shell=True)
-    subprocess.run('sudo service zeppelin-notebook stop', shell=True)
+    subprocess.run('sudo chown {0}:{0} -R /opt/zeppelin/'.format(args.os_user), shell=True, check=True)
+    subprocess.run('sudo service zeppelin-notebook stop', shell=True, check=True)
     subprocess.run('sudo -i wget http://archive.cloudera.com/beta/livy/livy-server-{0}.zip -O /opt/{1}/{2}/livy-server-{0}.zip'
-          .format(args.livy_version, args.dataproc_version, args.cluster_name), shell=True)
-    subprocess.run('sudo unzip /opt/{0}/{1}/livy-server-{2}.zip -d /opt/{0}/{1}/'.format(args.dataproc_version, args.cluster_name, args.livy_version), shell=True)
-    subprocess.run('sudo mv /opt/{0}/{1}/livy-server-{2}/ /opt/{0}/{1}/livy/'.format(args.dataproc_version, args.cluster_name, args.livy_version), shell=True)
+          .format(args.livy_version, args.dataproc_version, args.cluster_name), shell=True, check=True)
+    subprocess.run('sudo unzip /opt/{0}/{1}/livy-server-{2}.zip -d /opt/{0}/{1}/'.format(args.dataproc_version, args.cluster_name, args.livy_version), shell=True, check=True)
+    subprocess.run('sudo mv /opt/{0}/{1}/livy-server-{2}/ /opt/{0}/{1}/livy/'.format(args.dataproc_version, args.cluster_name, args.livy_version), shell=True, check=True)
     livy_path = '/opt/{0}/{1}/livy/'.format(args.dataproc_version, args.cluster_name)
-    subprocess.run('sudo mkdir -p {0}/logs'.format(livy_path), shell=True)
-    subprocess.run('sudo mkdir -p /var/run/livy', shell=True)
-    subprocess.run('sudo chown {0}:{0} -R /var/run/livy'.format(args.os_user), shell=True)
-    subprocess.run('sudo chown {0}:{0} -R {1}'.format(args.os_user, livy_path), shell=True)
+    subprocess.run('sudo mkdir -p {0}/logs'.format(livy_path), shell=True, check=True)
+    subprocess.run('sudo mkdir -p /var/run/livy', shell=True, check=True)
+    subprocess.run('sudo chown {0}:{0} -R /var/run/livy'.format(args.os_user), shell=True, check=True)
+    subprocess.run('sudo chown {0}:{0} -R {1}'.format(args.os_user, livy_path), shell=True, check=True)
 
 
 if __name__ == "__main__":
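
In install_remote_livy() above the steps are sequential and dependent: if the
wget of the Livy archive fails, the old code still ran unzip and mv against a
file that was never downloaded. With check=True the first failure aborts the
chain. A sketch of the effect (URL and paths are placeholders):

    import subprocess

    # If the download fails, CalledProcessError is raised here and the
    # unzip below never runs against a missing archive.
    subprocess.run("wget https://example.com/livy-server.zip -O /tmp/livy-server.zip",
                   shell=True, check=True)
    subprocess.run("unzip /tmp/livy-server.zip -d /tmp/livy/", shell=True, check=True)
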
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
index 74ceebe..05f4885 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
@@ -42,7 +42,7 @@ def install_libs_on_slaves(slave, data_engine):
                 data_engine['keyfile'], data_engine['libs'])
     try:
         # Run script to install additional libs
-        subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True, check=True)
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
@@ -81,7 +81,7 @@ if __name__ == "__main__":
                     data_engine['keyfile'], data_engine['libs'])
         try:
             # Run script to install additional libs
-            subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
index e0f1176..2580279 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
@@ -62,7 +62,7 @@ if __name__ == "__main__":
             .format(data_engine['os_user'], data_engine['master_ip'], data_engine['keyfile'], data_engine['group_name'])
         try:
             # Run script to get available libs
-            subprocess.run("~/scripts/{}.py {}".format('get_list_available_pkgs', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('get_list_available_pkgs', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
index 3105258..41e07b4 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
@@ -41,7 +41,7 @@ def install_libs_on_slaves(slave, data_engine):
         .format(data_engine['os_user'], data_engine['slave_ip'], data_engine['keyfile'])
     try:
         # Run script to install additional libs
-        subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True, check=True)
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
@@ -81,7 +81,7 @@ if __name__ == "__main__":
             .format(data_engine['os_user'], data_engine['master_ip'], data_engine['keyfile'])
         try:
             # Run script to install additional libs
-            subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
@@ -105,7 +105,7 @@ if __name__ == "__main__":
                                             data_engine['cluster_name'])
         try:
             # Run script to get available libs
-            subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py
index 25bdd6d..f202afb 100644
--- a/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py
@@ -55,7 +55,7 @@ spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/
 
 def pyspark_kernel(args):
     spark_path = '/opt/' + args.cluster_name + '/spark/'
-    subprocess.run('mkdir -p ' + kernels_dir + 'pyspark_' + args.cluster_name + '/', shell=True)
+    subprocess.run('mkdir -p ' + kernels_dir + 'pyspark_' + args.cluster_name + '/', shell=True, check=True)
     kernel_path = kernels_dir + "pyspark_" + args.cluster_name + "/kernel.json"
     template_file = "/tmp/{}/pyspark_dataengine_template.json".format(args.cluster_name)
     with open(template_file, 'r') as f:
@@ -69,13 +69,13 @@ def pyspark_kernel(args):
     text = text.replace('PYTHON_PATH', '/usr/bin/python3.8')
     with open(kernel_path, 'w') as f:
         f.write(text)
-    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True)
+    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True, check=True)
     subprocess.run(
         "PYJ=`find /opt/{0}/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat {1} | sed 's|PY4J|'$PYJ'|g' | sed \'/PYTHONPATH\"\:/s|\(.*\)\"|\\1/home/{2}/caffe/python:/home/{2}/pytorch/build:\"|\' > /tmp/{0}/kernel_var.json".
-        format(args.cluster_name, kernel_path, args.os_user), shell=True)
-    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True)
+        format(args.cluster_name, kernel_path, args.os_user), shell=True, check=True)
+    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True, check=True)
 
-    subprocess.run('mkdir -p ' + kernels_dir + 'py3spark_' + args.cluster_name + '/', shell=True)
+    subprocess.run('mkdir -p ' + kernels_dir + 'py3spark_' + args.cluster_name + '/', shell=True, check=True)
     kernel_path = kernels_dir + "py3spark_" + args.cluster_name + "/kernel.json"
     template_file = "/tmp/{}/pyspark_dataengine_template.json".format(args.cluster_name)
     with open(template_file, 'r') as f:
@@ -89,40 +89,40 @@ def pyspark_kernel(args):
     text = text.replace('PYTHON_PATH', '/usr/bin/python3.8')
     with open(kernel_path, 'w') as f:
         f.write(text)
-    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True)
+    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True, check=True)
     subprocess.run(
         "PYJ=`find /opt/{0}/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat {1} | sed 's|PY4J|'$PYJ'|g' | sed \'/PYTHONPATH\"\:/s|\(.*\)\"|\\1/home/{2}/caffe/python:/home/{2}/pytorch/build:\"|\' > /tmp/{0}/kernel_var.json".
-        format(args.cluster_name, kernel_path, args.os_user), shell=True)
-    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True)
+        format(args.cluster_name, kernel_path, args.os_user), shell=True, check=True)
+    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True, check=True)
 
 def install_sparkamagic_kernels(args):
     try:
-        subprocess.run('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension', shell=True)
-        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir), shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir), shell=True)
+        subprocess.run('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension', shell=True, check=True)
+        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True, check=True)
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir), shell=True, check=True)
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir), shell=True, check=True)
 
         pyspark_kernel_name = 'PySpark (Python-3.8 / Spark-{0} ) [{1}]'.format(args.spark_version,
                                                                          args.cluster_name)
         subprocess.run('sed -i \'s|PySpark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/pysparkkernel/kernel.json'.format(
-            pyspark_kernel_name, args.os_user), shell=True)
-        scala_version = subprocess.run('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True, shell=True)
+            pyspark_kernel_name, args.os_user), shell=True, check=True)
+        scala_version = subprocess.run('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True, shell=True, check=True)
         spark_kernel_name = 'Spark (Scala-{0} / Spark-{1} ) [{2}]'.format(scala_version, args.spark_version,
                                                                          args.cluster_name)
         subprocess.run('sed -i \'s|Spark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkkernel/kernel.json'.format(
-            spark_kernel_name, args.os_user), shell=True)
+            spark_kernel_name, args.os_user), shell=True, check=True)
 
         subprocess.run('sudo mv -f /home/{0}/.local/share/jupyter/kernels/pysparkkernel '
-              '/home/{0}/.local/share/jupyter/kernels/pysparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True)
+              '/home/{0}/.local/share/jupyter/kernels/pysparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True, check=True)
         subprocess.run('sudo mv -f /home/{0}/.local/share/jupyter/kernels/sparkkernel '
-              '/home/{0}/.local/share/jupyter/kernels/sparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True)
+              '/home/{0}/.local/share/jupyter/kernels/sparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True, check=True)
 
-        subprocess.run('mkdir -p /home/' + args.os_user + '/.sparkmagic', shell=True)
-        subprocess.run('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json', shell=True)
+        subprocess.run('mkdir -p /home/' + args.os_user + '/.sparkmagic', shell=True, check=True)
+        subprocess.run('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json', shell=True, check=True)
         spark_master_ip = args.spark_master.split('//')[1].split(':')[0]
         subprocess.run('sed -i \'s|LIVY_HOST|{0}|g\' /home/{1}/.sparkmagic/config.json'.format(
-                spark_master_ip, args.os_user), shell=True)
-        subprocess.run('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user), shell=True)
+                spark_master_ip, args.os_user), shell=True, check=True)
+        subprocess.run('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user), shell=True, check=True)
     except:
         sys.exit(1)
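
One pattern in install_sparkamagic_kernels() above (and repeated in the jupyter
script below) deserves a caveat: sparkmagic_dir, scala_version and r_version
are assigned the return value of subprocess.run(..., capture_output=True, ...),
which is a CompletedProcess object, not a string, so formatting it into a later
command embeds the object's repr. This patch adds check=True but leaves that
behavior as-is. Extracting the captured text would look roughly like this
(illustrative sketch):

    import subprocess

    result = subprocess.run("R --version | awk '/version / {print $3}'",
                            capture_output=True, shell=True, check=True)
    # CompletedProcess.stdout is bytes; decode and strip the trailing
    # newline before formatting the value into another command line.
    r_version = result.stdout.decode('utf-8').strip()
    print(r_version)
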
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
index a5b3c11..f9d68f4 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
@@ -56,10 +56,10 @@ spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/
 
 def r_kernel(args):
     spark_path = '/opt/{}/spark/'.format(args.cluster_name)
-    subprocess.run('mkdir -p {}/r_{}/'.format(kernels_dir, args.cluster_name), shell=True)
+    subprocess.run('mkdir -p {}/r_{}/'.format(kernels_dir, args.cluster_name), shell=True, check=True)
     kernel_path = "{}/r_{}/kernel.json".format(kernels_dir, args.cluster_name)
     template_file = "/tmp/{}/r_dataengine_template.json".format(args.cluster_name)
-    r_version = subprocess.run("R --version | awk '/version / {print $3}'", capture_output = True, shell=True)
+    r_version = subprocess.run("R --version | awk '/version / {print $3}'", capture_output = True, shell=True, check=True)
 
     with open(template_file, 'r') as f:
         text = f.read()
@@ -75,11 +75,11 @@ def r_kernel(args):
 
 def toree_kernel(args):
     spark_path = '/opt/' + args.cluster_name + '/spark/'
-    scala_version = subprocess.run('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True, shell=True)
-    subprocess.run('mkdir -p ' + kernels_dir + 'toree_' + args.cluster_name + '/', shell=True)
-    subprocess.run('tar zxvf /tmp/{}/toree_kernel.tar.gz -C '.format(args.cluster_name) + kernels_dir + 'toree_' + args.cluster_name + '/', shell=True)
-    subprocess.run('sudo mv {0}toree_{1}/toree-0.3.0-incubating/* {0}toree_{1}/'.format(kernels_dir, args.cluster_name), shell=True)
-    subprocess.run('sudo rm -r {0}toree_{1}/toree-0.3.0-incubating'.format(kernels_dir, args.cluster_name), shell=True)
+    scala_version = subprocess.run('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True, shell=True, check=True)
+    subprocess.run('mkdir -p ' + kernels_dir + 'toree_' + args.cluster_name + '/', shell=True, check=True)
+    subprocess.run('tar zxvf /tmp/{}/toree_kernel.tar.gz -C '.format(args.cluster_name) + kernels_dir + 'toree_' + args.cluster_name + '/', shell=True, check=True)
+    subprocess.run('sudo mv {0}toree_{1}/toree-0.3.0-incubating/* {0}toree_{1}/'.format(kernels_dir, args.cluster_name), shell=True, check=True)
+    subprocess.run('sudo rm -r {0}toree_{1}/toree-0.3.0-incubating'.format(kernels_dir, args.cluster_name), shell=True, check=True)
     kernel_path = kernels_dir + "toree_" + args.cluster_name + "/kernel.json"
     template_file = "/tmp/{}/toree_dataengine_template.json".format(args.cluster_name)
     with open(template_file, 'r') as f:
@@ -92,12 +92,12 @@ def toree_kernel(args):
     text = text.replace('SCALA_VERSION', scala_version)
     with open(kernel_path, 'w') as f:
         f.write(text)
-    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True)
+    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True, check=True)
     subprocess.run(
         "PYJ=`find /opt/" + args.cluster_name +
         "/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat " + kernel_path +
-        " | sed 's|PY4J|'$PYJ'|g' > /tmp/{}/kernel_var.json".format(args.cluster_name), shell=True)
-    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True)
+        " | sed 's|PY4J|'$PYJ'|g' > /tmp/{}/kernel_var.json".format(args.cluster_name), shell=True, check=True)
+    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True, check=True)
     run_sh_path = kernels_dir + "toree_" + args.cluster_name + "/bin/run.sh"
     template_sh_file = '/tmp/{}/run_template.sh'.format(args.cluster_name)
     with open(template_sh_file, 'r') as f:
@@ -110,7 +110,7 @@ def toree_kernel(args):
 
 def pyspark_kernel(args):
     spark_path = '/opt/' + args.cluster_name + '/spark/'
-    subprocess.run('mkdir -p ' + kernels_dir + 'pyspark_' + args.cluster_name + '/', shell=True)
+    subprocess.run('mkdir -p ' + kernels_dir + 'pyspark_' + args.cluster_name + '/', shell=True, check=True)
     kernel_path = kernels_dir + "pyspark_" + args.cluster_name + "/kernel.json"
     template_file = "/tmp/{}/pyspark_dataengine_template.json".format(args.cluster_name)
     with open(template_file, 'r') as f:
@@ -124,13 +124,13 @@ def pyspark_kernel(args):
     text = text.replace('PYTHON_PATH', '/usr/bin/python3.8')
     with open(kernel_path, 'w') as f:
         f.write(text)
-    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True)
+    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True, check=True)
     subprocess.run(
         "PYJ=`find /opt/{0}/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat {1} | sed 's|PY4J|'$PYJ'|g' | sed \'/PYTHONPATH\"\:/s|\(.*\)\"|\\1/home/{2}/caffe/python:/home/{2}/pytorch/build:\"|\' > /tmp/{0}/kernel_var.json".
-        format(args.cluster_name, kernel_path, args.os_user), shell=True)
-    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True)
+        format(args.cluster_name, kernel_path, args.os_user), shell=True, check=True)
+    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True, check=True)
 
-    subprocess.run('mkdir -p ' + kernels_dir + 'py3spark_' + args.cluster_name + '/', shell=True)
+    subprocess.run('mkdir -p ' + kernels_dir + 'py3spark_' + args.cluster_name + '/', shell=True, check=True)
     kernel_path = kernels_dir + "py3spark_" + args.cluster_name + "/kernel.json"
     template_file = "/tmp/{}/pyspark_dataengine_template.json".format(args.cluster_name)
     with open(template_file, 'r') as f:
@@ -144,45 +144,45 @@ def pyspark_kernel(args):
     text = text.replace('PYTHON_PATH', '/usr/bin/python3.8')
     with open(kernel_path, 'w') as f:
         f.write(text)
-    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True)
+    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True, check=True)
     subprocess.run(
         "PYJ=`find /opt/{0}/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat {1} | sed 's|PY4J|'$PYJ'|g' | sed \'/PYTHONPATH\"\:/s|\(.*\)\"|\\1/home/{2}/caffe/python:/home/{2}/pytorch/build:\"|\' > /tmp/{0}/kernel_var.json".
-        format(args.cluster_name, kernel_path, args.os_user), shell=True)
-    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True)
+        format(args.cluster_name, kernel_path, args.os_user), shell=True, check=True)
+    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True, check=True)
 
 def install_sparkamagic_kernels(args):
     try:
-        subprocess.run('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension', shell=True)
-        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir), shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir), shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkrkernel --user'.format(sparkmagic_dir), shell=True)
+        subprocess.run('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension', shell=True, check=True)
+        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True, check=True).stdout.decode('utf-8').rstrip('\n')
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir), shell=True, check=True)
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir), shell=True, check=True)
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkrkernel --user'.format(sparkmagic_dir), shell=True, check=True)
         pyspark_kernel_name = 'PySpark (Python-3.8 / Spark-{0} ) [{1}]'.format(args.spark_version,
                                                                          args.cluster_name)
         subprocess.run('sed -i \'s|PySpark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/pysparkkernel/kernel.json'.format(
-            pyspark_kernel_name, args.os_user), shell=True)
-        scala_version = subprocess.run('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True, shell=True)
+            pyspark_kernel_name, args.os_user), shell=True, check=True)
+        scala_version = subprocess.run('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True, shell=True, check=True).stdout.decode('utf-8').rstrip('\n')
         spark_kernel_name = 'Spark (Scala-{0} / Spark-{1} ) [{2}]'.format(scala_version, args.spark_version,
                                                                          args.cluster_name)
         subprocess.run('sed -i \'s|Spark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkkernel/kernel.json'.format(
-            spark_kernel_name, args.os_user), shell=True)
-        r_version = subprocess.run("R --version | awk '/version / {print $3}'", capture_output=True, shell=True)
+            spark_kernel_name, args.os_user), shell=True, check=True)
+        r_version = subprocess.run("R --version | awk '/version / {print $3}'", capture_output=True, shell=True, check=True).stdout.decode('utf-8').rstrip('\n')
         sparkr_kernel_name = 'SparkR (R-{0} / Spark-{1} ) [{2}]'.format(str(r_version), args.spark_version,
                                                                             args.cluster_name)
         subprocess.run('sed -i \'s|SparkR|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkrkernel/kernel.json'.format(
-            sparkr_kernel_name, args.os_user), shell=True)
+            sparkr_kernel_name, args.os_user), shell=True, check=True)
         subprocess.run('sudo mv -f /home/{0}/.local/share/jupyter/kernels/pysparkkernel '
-              '/home/{0}/.local/share/jupyter/kernels/pysparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True)
+              '/home/{0}/.local/share/jupyter/kernels/pysparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True, check=True)
         subprocess.run('sudo mv -f /home/{0}/.local/share/jupyter/kernels/sparkkernel '
-              '/home/{0}/.local/share/jupyter/kernels/sparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True)
+              '/home/{0}/.local/share/jupyter/kernels/sparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True, check=True)
         subprocess.run('sudo mv -f /home/{0}/.local/share/jupyter/kernels/sparkrkernel '
-              '/home/{0}/.local/share/jupyter/kernels/sparkrkernel_{1}'.format(args.os_user, args.cluster_name), shell=True)
-        subprocess.run('mkdir -p /home/' + args.os_user + '/.sparkmagic', shell=True)
-        subprocess.run('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json', shell=True)
+              '/home/{0}/.local/share/jupyter/kernels/sparkrkernel_{1}'.format(args.os_user, args.cluster_name), shell=True, check=True)
+        subprocess.run('mkdir -p /home/' + args.os_user + '/.sparkmagic', shell=True, check=True)
+        subprocess.run('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json', shell=True, check=True)
         spark_master_ip = args.spark_master.split('//')[1].split(':')[0]
         subprocess.run('sed -i \'s|LIVY_HOST|{0}|g\' /home/{1}/.sparkmagic/config.json'.format(
-                spark_master_ip, args.os_user), shell=True)
-        subprocess.run('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user), shell=True)
+                spark_master_ip, args.os_user), shell=True, check=True)
+        subprocess.run('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user), shell=True, check=True)
     except:
         sys.exit(1)
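
Note: with capture_output=True, subprocess.run() returns a CompletedProcess
object, not the command's output, which is why the captured calls above take
.stdout.decode('utf-8') before the value is spliced into later shell strings;
otherwise the literal repr "CompletedProcess(args=..., returncode=0, ...)"
would land in the kernel names and install paths. A minimal sketch of the
pattern (the helper name is illustrative, not from the repository):

    import subprocess

    def run_capture(cmd):
        # Run through the shell, fail fast on a non-zero exit, and hand
        # back stdout as stripped text rather than a CompletedProcess.
        result = subprocess.run(cmd, shell=True, check=True,
                                capture_output=True)
        return result.stdout.decode('utf-8').rstrip('\n')

    sparkmagic_dir = run_capture(
        "pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'")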
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py b/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
index 47ccf86..bb2b974 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
@@ -55,8 +55,8 @@ if __name__ == "__main__":
             .format(notebook_config['os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
             # Run script to manage git credentials
-            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True)
-            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_download_git_certfile', params), shell=True, check=True)
+            subprocess.run("~/scripts/{}.py {}".format('manage_git_creds', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
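
Note: the bare "except: ... raise Exception" above discards the
CalledProcessError that check=True now raises, including the failing exit
code. A hedged sketch of a tighter variant (the argument string is
illustrative):

    import subprocess
    import traceback

    params = '--os_user datalab-user'  # illustrative arguments
    try:
        subprocess.run("~/scripts/manage_git_creds.py {}".format(params),
                       shell=True, check=True)
    except subprocess.CalledProcessError as err:
        traceback.print_exc()
        # "from err" chains the original error (and its exit code) onto
        # the raised exception instead of dropping it.
        raise Exception('manage_git_creds.py failed') from err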
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py b/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
index 3108d7c..6784b81 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
@@ -64,7 +64,7 @@ if __name__ == "__main__":
             .format(notebook_config['os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'], notebook_config['resource_type'], notebook_config['dataengine_ip'])
         try:
             # Run script to get available libs
-            subprocess.run("~/scripts/{}.py {}".format('check_inactivity', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('check_inactivity', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py b/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
index 32888be..50b9609 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
@@ -62,7 +62,7 @@ if __name__ == "__main__":
                     notebook_config['keyfile'], notebook_config['libs'])
         try:
             # Run script to install additional libs
-            subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py b/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
index 7a6c4fa..e6e989e 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
@@ -61,7 +61,7 @@ if __name__ == "__main__":
             .format(notebook_config['os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'], notebook_config['group_name'])
         try:
             # Run script to get available libs
-            subprocess.run("~/scripts/{}.py {}".format('get_list_available_pkgs', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('get_list_available_pkgs', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py b/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
index 95ea7c1..596d4d8 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
@@ -61,7 +61,7 @@ if __name__ == "__main__":
             .format(notebook_config['os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
             # Run script to get available libs
-            subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True, check=True)
         except:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
index f94c9f8..c7e0017 100644
--- a/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
@@ -53,30 +53,30 @@ spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/
 def configure_rstudio():
     if not os.path.exists('/home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured'):
         try:
-            subprocess.run('echo "export R_LIBS_USER=' + cluster_dir + 'spark/R/lib:" >> /home/' + args.os_user + '/.bashrc', shell=True)
-            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run('echo \'SPARK_HOME="' + cluster_dir + 'spark/"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True)
+            subprocess.run('echo "export R_LIBS_USER=' + cluster_dir + 'spark/R/lib:" >> /home/' + args.os_user + '/.bashrc', shell=True, check=True)
+            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run('echo \'SPARK_HOME="' + cluster_dir + 'spark/"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True, check=True)
             subprocess.run('echo \'master="' + args.spark_master + '" # Cluster - "' + args.cluster_name + '" \' >> /home/' +
-                  args.os_user + '/.Rprofile', shell=True)
-            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True)
-            subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured', shell=True)
+                  args.os_user + '/.Rprofile', shell=True, check=True)
+            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
+            subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured', shell=True, check=True)
         except Exception as err:
             print('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
-            subprocess.run("sed -i '/R_LIBS_USER/ { s|=\(.*\)|=\\1" + cluster_dir + "spark/R/lib:| }' /home/" + args.os_user + "/.bashrc", shell=True)
-            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run('echo \'SPARK_HOME="' + cluster_dir + 'spark/"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True)
+            subprocess.run("sed -i '/R_LIBS_USER/ { s|=\(.*\)|=\\1" + cluster_dir + "spark/R/lib:| }' /home/" + args.os_user + "/.bashrc", shell=True, check=True)
+            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run('echo \'SPARK_HOME="' + cluster_dir + 'spark/"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True, check=True)
             subprocess.run('echo \'master="' + args.spark_master + '" # Cluster - "' + args.cluster_name + '" \' >> /home/' +
-                  args.os_user + '/.Rprofile', shell=True)
-            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True)
+                  args.os_user + '/.Rprofile', shell=True, check=True)
+            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
         except Exception as err:
             print('Error: {0}'.format(err))
             sys.exit(1)
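
Note: check=True on the sed -i calls above only catches hard failures
(missing file, bad expression); GNU sed exits 0 even when the pattern
matches nothing, so a substitution that silently did nothing still passes.
A minimal sketch of one way to confirm the edit landed (helper name and
paths are illustrative, not from the repository):

    import subprocess

    def sed_and_verify(pattern, replacement, path):
        subprocess.run("sed -i 's|{0}|{1}|' {2}".format(pattern, replacement, path),
                       shell=True, check=True)
        # grep -q exits 1 when nothing matches, which check=True turns
        # into a CalledProcessError, flagging a no-op substitution.
        subprocess.run("grep -q '{0}' {1}".format(replacement, path),
                       shell=True, check=True)

    sed_and_verify('^SPARK_HOME', '#SPARK_HOME', '/home/datalab-user/.Renviron')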
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
index b7561c1..c8965b6 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
@@ -52,30 +52,30 @@ spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/
 def configure_rstudio():
     if not os.path.exists('/home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured'):
         try:
-            subprocess.run('echo "export R_LIBS_USER=' + cluster_dir + 'spark/R/lib:" >> /home/' + args.os_user + '/.bashrc', shell=True)
-            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run('echo \'SPARK_HOME="' + cluster_dir + 'spark/"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True)
+            subprocess.run('echo "export R_LIBS_USER=' + cluster_dir + 'spark/R/lib:" >> /home/' + args.os_user + '/.bashrc', shell=True, check=True)
+            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run('echo \'SPARK_HOME="' + cluster_dir + 'spark/"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True, check=True)
             subprocess.run('echo \'master="' + args.spark_master + '" # Cluster - "' + args.cluster_name + '" \' >> /home/' +
-                  args.os_user + '/.Rprofile', shell=True)
-            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True)
-            subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured', shell=True)
+                  args.os_user + '/.Rprofile', shell=True, check=True)
+            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
+            subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured', shell=True, check=True)
         except Exception as err:
             print('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
-            subprocess.run("sed -i '/R_LIBS_USER/ { s|=\(.*\)|=\\1" + cluster_dir + "spark/R/lib:| }' /home/" + args.os_user + "/.bashrc", shell=True)
-            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True)
-            subprocess.run('echo \'SPARK_HOME="' + cluster_dir + 'spark/"\' >> /home/' + args.os_user + '/.Renviron', shell=True)
-            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True)
+            subprocess.run("sed -i '/R_LIBS_USER/ { s|=\(.*\)|=\\1" + cluster_dir + "spark/R/lib:| }' /home/" + args.os_user + "/.bashrc", shell=True, check=True)
+            subprocess.run("sed -i 's/^SPARK_HOME/#SPARK_HOME/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^YARN_CONF_DIR/#YARN_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run("sed -i 's/^HADOOP_CONF_DIR/#HADOOP_CONF_DIR/' /home/" + args.os_user + "/.Renviron", shell=True, check=True)
+            subprocess.run('echo \'SPARK_HOME="' + cluster_dir + 'spark/"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
+            subprocess.run("sed -i 's/^master/#master/' /home/" + args.os_user + "/.Rprofile", shell=True, check=True)
             subprocess.run('echo \'master="' + args.spark_master + '" # Cluster - "' + args.cluster_name + '" \' >> /home/' +
-                  args.os_user + '/.Rprofile', shell=True)
-            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True)
+                  args.os_user + '/.Rprofile', shell=True, check=True)
+            subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
         except Exception as err:
             print('Error: {0}'.format(err))
             sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py
index a6df4ee..bf6cada 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py
@@ -54,7 +54,7 @@ spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/
 
 def pyspark_kernel(args):
     spark_path = '/opt/' + args.cluster_name + '/spark/'
-    subprocess.run('mkdir -p ' + kernels_dir + 'pyspark_' + args.cluster_name + '/', shell=True)
+    subprocess.run('mkdir -p ' + kernels_dir + 'pyspark_' + args.cluster_name + '/', shell=True, check=True)
     kernel_path = kernels_dir + "pyspark_" + args.cluster_name + "/kernel.json"
     template_file = "/tmp/{}/pyspark_dataengine_template.json".format(args.cluster_name)
     with open(template_file, 'r') as f:
@@ -68,13 +68,13 @@ def pyspark_kernel(args):
     text = text.replace('PYTHON_PATH', '/usr/bin/python3.8')
     with open(kernel_path, 'w') as f:
         f.write(text)
-    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True)
+    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True, check=True)
     subprocess.run(
         "PYJ=`find /opt/{0}/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat {1} | sed 's|PY4J|'$PYJ'|g' | sed \'/PYTHONPATH\"\:/s|\(.*\)\"|\\1/home/{2}/caffe/python:/home/{2}/pytorch/build:\"|\' > /tmp/{0}/kernel_var.json".
-        format(args.cluster_name, kernel_path, args.os_user), shell=True)
-    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True)
+        format(args.cluster_name, kernel_path, args.os_user), shell=True, check=True)
+    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True, check=True)
 
-    subprocess.run('mkdir -p ' + kernels_dir + 'py3spark_' + args.cluster_name + '/', shell=True)
+    subprocess.run('mkdir -p ' + kernels_dir + 'py3spark_' + args.cluster_name + '/', shell=True, check=True)
     kernel_path = kernels_dir + "py3spark_" + args.cluster_name + "/kernel.json"
     template_file = "/tmp/{}/pyspark_dataengine_template.json".format(args.cluster_name)
     with open(template_file, 'r') as f:
@@ -88,40 +88,40 @@ def pyspark_kernel(args):
     text = text.replace('PYTHON_PATH', '/usr/bin/python3.8')
     with open(kernel_path, 'w') as f:
         f.write(text)
-    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True)
+    subprocess.run('touch /tmp/{}/kernel_var.json'.format(args.cluster_name), shell=True, check=True)
     subprocess.run(
         "PYJ=`find /opt/{0}/spark/ -name '*py4j*.zip' | tr '\\n' ':' | sed 's|:$||g'`; cat {1} | sed 's|PY4J|'$PYJ'|g' | sed \'/PYTHONPATH\"\:/s|\(.*\)\"|\\1/home/{2}/caffe/python:/home/{2}/pytorch/build:\"|\' > /tmp/{0}/kernel_var.json".
-        format(args.cluster_name, kernel_path, args.os_user), shell=True)
-    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True)
+        format(args.cluster_name, kernel_path, args.os_user), shell=True, check=True)
+    subprocess.run('sudo mv /tmp/{}/kernel_var.json '.format(args.cluster_name) + kernel_path, shell=True, check=True)
 
 def install_sparkamagic_kernels(args):
     try:
-        subprocess.run('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension', shell=True)
-        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir), shell=True)
-        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir), shell=True)
+        subprocess.run('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension', shell=True, check=True)
+        sparkmagic_dir = subprocess.run("sudo pip3 show sparkmagic | grep 'Location: ' | awk '{print $2}'", capture_output=True, shell=True, check=True).stdout.decode('utf-8').rstrip('\n')
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/sparkkernel --user'.format(sparkmagic_dir), shell=True, check=True)
+        subprocess.run('sudo jupyter-kernelspec install {}/sparkmagic/kernels/pysparkkernel --user'.format(sparkmagic_dir), shell=True, check=True)
 
         pyspark_kernel_name = 'PySpark (Python-3.8 / Spark-{0} ) [{1}]'.format(args.spark_version,
                                                                          args.cluster_name)
         subprocess.run('sed -i \'s|PySpark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/pysparkkernel/kernel.json'.format(
-            pyspark_kernel_name, args.os_user), shell=True)
-        scala_version = subprocess.run('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True, shell=True)
+            pyspark_kernel_name, args.os_user), shell=True, check=True)
+        scala_version = subprocess.run('spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"', capture_output=True, shell=True, check=True).stdout.decode('utf-8').rstrip('\n')
         spark_kernel_name = 'Spark (Scala-{0} / Spark-{1} ) [{2}]'.format(scala_version, args.spark_version,
                                                                          args.cluster_name)
         subprocess.run('sed -i \'s|Spark|{0}|g\' /home/{1}/.local/share/jupyter/kernels/sparkkernel/kernel.json'.format(
-            spark_kernel_name, args.os_user), shell=True)
+            spark_kernel_name, args.os_user), shell=True, check=True)
 
         subprocess.run('sudo mv -f /home/{0}/.local/share/jupyter/kernels/pysparkkernel '
-              '/home/{0}/.local/share/jupyter/kernels/pysparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True)
+              '/home/{0}/.local/share/jupyter/kernels/pysparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True, check=True)
         subprocess.run('sudo mv -f /home/{0}/.local/share/jupyter/kernels/sparkkernel '
-              '/home/{0}/.local/share/jupyter/kernels/sparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True)
+              '/home/{0}/.local/share/jupyter/kernels/sparkkernel_{1}'.format(args.os_user, args.cluster_name), shell=True, check=True)
 
-        subprocess.run('mkdir -p /home/' + args.os_user + '/.sparkmagic', shell=True)
-        subprocess.run('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json', shell=True)
+        subprocess.run('mkdir -p /home/' + args.os_user + '/.sparkmagic', shell=True, check=True)
+        subprocess.run('cp -f /tmp/sparkmagic_config_template.json /home/' + args.os_user + '/.sparkmagic/config.json', shell=True, check=True)
         spark_master_ip = args.spark_master.split('//')[1].split(':')[0]
         subprocess.run('sed -i \'s|LIVY_HOST|{0}|g\' /home/{1}/.sparkmagic/config.json'.format(
-                spark_master_ip, args.os_user), shell=True)
-        subprocess.run('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user), shell=True)
+                spark_master_ip, args.os_user), shell=True, check=True)
+        subprocess.run('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user), shell=True, check=True)
     except:
         sys.exit(1)
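
Note: the bare "except: sys.exit(1)" above exits without recording which
command failed, even though check=True now raises a CalledProcessError
carrying exactly that detail. A small sketch of surfacing it first:

    import subprocess
    import sys
    import traceback

    try:
        subprocess.run('sudo jupyter nbextension enable --py --sys-prefix widgetsnbextension',
                       shell=True, check=True)
    except subprocess.CalledProcessError:
        # Log the failing command and exit status before dying, so the
        # provisioning log shows which step broke.
        traceback.print_exc()
        sys.exit(1)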
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
index 3fa8f30..0058bcd 100644
--- a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
@@ -62,38 +62,38 @@ def configure_zeppelin_dataengine_interpreter(cluster_name, cluster_dir, os_user
         default_port = 8998
         livy_port = ''
         livy_path = '/opt/' + cluster_name + '/livy/'
-        subprocess.run('echo \"Configuring Data Engine path for Zeppelin\"', shell=True)
+        subprocess.run('echo \"Configuring Data Engine path for Zeppelin\"', shell=True, check=True)
         subprocess.run('sed -i \"s/^export SPARK_HOME.*/export SPARK_HOME=\/opt\/' + cluster_name +
-              '\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh', shell=True)
-        subprocess.run('sudo chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/', shell=True)
-        subprocess.run('sudo systemctl daemon-reload', shell=True)
-        subprocess.run('sudo service zeppelin-notebook stop', shell=True)
-        subprocess.run('sudo service zeppelin-notebook start', shell=True)
+              '\/spark/\" /opt/zeppelin/conf/zeppelin-env.sh', shell=True, check=True)
+        subprocess.run('sudo chown ' + os_user + ':' + os_user + ' -R /opt/zeppelin/', shell=True, check=True)
+        subprocess.run('sudo systemctl daemon-reload', shell=True, check=True)
+        subprocess.run('sudo service zeppelin-notebook stop', shell=True, check=True)
+        subprocess.run('sudo service zeppelin-notebook start', shell=True, check=True)
         while not zeppelin_restarted:
-            subprocess.run('sleep 5', shell=True)
-            result = subprocess.run('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True)
+            subprocess.run('sleep 5', shell=True, check=True)
+            result = subprocess.run('sudo bash -c "nmap -p 8080 localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True, check=True).stdout.decode('utf-8')
             result = result[:1]
             if result == '1':
                 zeppelin_restarted = True
-        subprocess.run('sleep 5', shell=True)
-        subprocess.run('echo \"Configuring Data Engine spark interpreter for Zeppelin\"', shell=True)
+        subprocess.run('sleep 5', shell=True, check=True)
+        subprocess.run('echo \"Configuring Data Engine spark interpreter for Zeppelin\"', shell=True, check=True)
         if multiple_clusters == 'true':
             while not port_number_found:
                 port_free = subprocess.run('sudo bash -c "nmap -p ' + str(default_port) +
-                                  ' localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True)
+                                  ' localhost | grep closed > /dev/null" ; echo $?', capture_output=True, shell=True, check=True).stdout.decode('utf-8')
                 port_free = port_free[:1]
                 if port_free == '0':
                     livy_port = default_port
                     port_number_found = True
                 else:
                     default_port += 1
-            subprocess.run('sudo echo "livy.server.port = ' + str(livy_port) + '" >> ' + livy_path + 'conf/livy.conf', shell=True)
-            subprocess.run('sudo echo "livy.spark.master = ' + spark_master + '" >> ' + livy_path + 'conf/livy.conf', shell=True)
+            subprocess.run('sudo echo "livy.server.port = ' + str(livy_port) + '" >> ' + livy_path + 'conf/livy.conf', shell=True, check=True)
+            subprocess.run('sudo echo "livy.spark.master = ' + spark_master + '" >> ' + livy_path + 'conf/livy.conf', shell=True, check=True)
             if os.path.exists(livy_path + 'conf/spark-blacklist.conf'):
-                subprocess.run('sudo sed -i "s/^/#/g" ' + livy_path + 'conf/spark-blacklist.conf', shell=True)
-            subprocess.run(''' sudo echo "export SPARK_HOME=''' + cluster_dir + '''spark/" >> ''' + livy_path + '''conf/livy-env.sh''', shell=True)
+                subprocess.run('sudo sed -i "s/^/#/g" ' + livy_path + 'conf/spark-blacklist.conf', shell=True, check=True)
+            subprocess.run(''' sudo echo "export SPARK_HOME=''' + cluster_dir + '''spark/" >> ''' + livy_path + '''conf/livy-env.sh''', shell=True, check=True)
             subprocess.run(''' sudo echo "export PYSPARK3_PYTHON=python3.8" >> ''' +
-                  livy_path + '''conf/livy-env.sh''', shell=True)
+                  livy_path + '''conf/livy-env.sh''', shell=True, check=True)
             template_file = "/tmp/{}/dataengine_interpreter.json".format(args.cluster_name)
             fr = open(template_file, 'r+')
             text = fr.read()
@@ -107,20 +107,20 @@ def configure_zeppelin_dataengine_interpreter(cluster_name, cluster_dir, os_user
             for _ in range(5):
                 try:
                     subprocess.run("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d " +
-                          "@/tmp/{}/dataengine_interpreter.json http://localhost:8080/api/interpreter/setting".format(args.cluster_name), shell=True)
+                          "@/tmp/{}/dataengine_interpreter.json http://localhost:8080/api/interpreter/setting".format(args.cluster_name), shell=True, check=True)
                     break
                 except:
-                    subprocess.run('sleep 5', shell=True)
+                    subprocess.run('sleep 5', shell=True, check=True)
             subprocess.run('sudo cp /opt/livy-server-cluster.service /etc/systemd/system/livy-server-' + str(livy_port) +
-                  '.service', shell=True)
+                  '.service', shell=True, check=True)
             subprocess.run("sudo sed -i 's|OS_USER|" + os_user + "|' /etc/systemd/system/livy-server-" + str(livy_port) +
-                  '.service', shell=True)
+                  '.service', shell=True, check=True)
             subprocess.run("sudo sed -i 's|LIVY_PATH|" + livy_path + "|' /etc/systemd/system/livy-server-" + str(livy_port)
-                  + '.service', shell=True)
-            subprocess.run('sudo chmod 644 /etc/systemd/system/livy-server-' + str(livy_port) + '.service', shell=True)
-            subprocess.run("sudo systemctl daemon-reload", shell=True)
-            subprocess.run("sudo systemctl enable livy-server-" + str(livy_port), shell=True)
-            subprocess.run('sudo systemctl start livy-server-' + str(livy_port), shell=True)
+                  + '.service', shell=True, check=True)
+            subprocess.run('sudo chmod 644 /etc/systemd/system/livy-server-' + str(livy_port) + '.service', shell=True, check=True)
+            subprocess.run("sudo systemctl daemon-reload", shell=True, check=True)
+            subprocess.run("sudo systemctl enable livy-server-" + str(livy_port), shell=True, check=True)
+            subprocess.run('sudo systemctl start livy-server-' + str(livy_port), shell=True, check=True)
         else:
             template_file = "/tmp/{}/dataengine_interpreter.json".format(args.cluster_name)
             p_versions = ["2", "3.8"]
@@ -140,30 +140,30 @@ def configure_zeppelin_dataengine_interpreter(cluster_name, cluster_dir, os_user
                     try:
                         subprocess.run("curl --noproxy localhost -H 'Content-Type: application/json' -X POST -d " +
                               "@/tmp/dataengine_spark_py" + p_version +
-                              "_interpreter.json http://localhost:8080/api/interpreter/setting", shell=True)
+                              "_interpreter.json http://localhost:8080/api/interpreter/setting", shell=True, check=True)
                         break
                     except:
-                        subprocess.run('sleep 5', shell=True)
-        subprocess.run('touch /home/' + os_user + '/.ensure_dir/dataengine_' + cluster_name + '_interpreter_ensured', shell=True)
+                        subprocess.run('sleep 5', shell=True, check=True)
+        subprocess.run('touch /home/' + os_user + '/.ensure_dir/dataengine_' + cluster_name + '_interpreter_ensured', shell=True, check=True)
     except Exception as err:
         print('Error: {0}'.format(err))
         sys.exit(1)
 
 
 def install_remote_livy(args):
-    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/', shell=True)
-    subprocess.run('sudo service zeppelin-notebook stop', shell=True)
+    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/', shell=True, check=True)
+    subprocess.run('sudo service zeppelin-notebook stop', shell=True, check=True)
     subprocess.run('sudo -i wget http://archive.cloudera.com/beta/livy/livy-server-' + args.livy_version + '.zip -O /opt/' +
-          args.cluster_name + '/livy-server-' + args.livy_version + '.zip', shell=True)
+          args.cluster_name + '/livy-server-' + args.livy_version + '.zip', shell=True, check=True)
     subprocess.run('sudo unzip /opt/' + args.cluster_name + '/livy-server-' + args.livy_version + '.zip -d /opt/' +
-          args.cluster_name + '/', shell=True)
+          args.cluster_name + '/', shell=True, check=True)
     subprocess.run('sudo mv /opt/' + args.cluster_name + '/livy-server-' + args.livy_version + '/ /opt/' + args.cluster_name +
-          '/livy/', shell=True)
+          '/livy/', shell=True, check=True)
     livy_path = '/opt/' + args.cluster_name + '/livy/'
-    subprocess.run('sudo mkdir -p ' + livy_path + '/logs', shell=True)
-    subprocess.run('sudo mkdir -p /var/run/livy', shell=True)
-    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /var/run/livy', shell=True)
-    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R ' + livy_path, shell=True)
+    subprocess.run('sudo mkdir -p ' + livy_path + '/logs', shell=True, check=True)
+    subprocess.run('sudo mkdir -p /var/run/livy', shell=True, check=True)
+    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /var/run/livy', shell=True, check=True)
+    subprocess.run('sudo chown ' + args.os_user + ':' + args.os_user + ' -R ' + livy_path, shell=True, check=True)
 
 
 if __name__ == "__main__":
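
Note: on the nmap polling lines above, the trailing "; echo $?" makes the
shell's own exit status always 0 (echo's), so the added check=True never
fires there; the loops read the printed digit from the captured stdout
instead. The 'sudo echo ... >> livy.conf' appends are also worth a second
look: the redirection runs in the calling shell, not under sudo, so they
rely on the target file already being writable (piping through
'sudo tee -a' is the usual workaround). One possible rework of the restart
wait that drops the echo indirection and inspects the return code directly
(function name illustrative):

    import subprocess
    import time

    def wait_for_port_open(port, delay=5):
        # grep exits non-zero when "closed" is absent from the nmap
        # output, i.e. the port is now serving; poll until then. The
        # default check=False applies: a non-zero exit is the signal
        # here, not an error.
        while True:
            probe = subprocess.run('nmap -p {} localhost | grep closed'.format(port),
                                   shell=True, capture_output=True)
            if probe.returncode != 0:
                return
            time.sleep(delay)

    wait_for_port_open(8080)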
diff --git a/infrastructure-provisioning/src/jupyter/fabfile.py b/infrastructure-provisioning/src/jupyter/fabfile.py
index a9467a0..2c5d7bc 100644
--- a/infrastructure-provisioning/src/jupyter/fabfile.py
+++ b/infrastructure-provisioning/src/jupyter/fabfile.py
@@ -45,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -53,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('jupyter_configure', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('jupyter_configure', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -68,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -83,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -99,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -116,9 +116,9 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine-service':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True, check=True)
         elif os.environ['conf_resource'] == 'dataengine':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring analytical tool on Notebook node.", str(err))
@@ -135,7 +135,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -152,7 +152,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -169,7 +169,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -186,7 +186,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -203,7 +203,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -220,7 +220,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
@@ -236,7 +236,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/infrastructure-provisioning/src/jupyterlab/fabfile.py b/infrastructure-provisioning/src/jupyterlab/fabfile.py
index 99ecb14..7b42024 100644
--- a/infrastructure-provisioning/src/jupyterlab/fabfile.py
+++ b/infrastructure-provisioning/src/jupyterlab/fabfile.py
@@ -45,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -53,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('jupyterlab_configure', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('jupyterlab_configure', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -68,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -83,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -99,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -116,9 +116,9 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine-service':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True, check=True)
         elif os.environ['conf_resource'] == 'dataengine':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring analytical tool on Notebook node.", str(err))
@@ -135,7 +135,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -152,7 +152,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -169,7 +169,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -186,7 +186,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -203,7 +203,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -220,7 +220,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
diff --git a/infrastructure-provisioning/src/project/fabfile.py b/infrastructure-provisioning/src/project/fabfile.py
index ceb7e60..b9aac06 100644
--- a/infrastructure-provisioning/src/project/fabfile.py
+++ b/infrastructure-provisioning/src/project/fabfile.py
@@ -38,14 +38,14 @@ def run():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('project_prepare'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('project_prepare'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Project.", str(err))
         sys.exit(1)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('edge_configure'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('edge_configure'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Edge node.", str(err))
@@ -60,7 +60,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('project_terminate'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('project_terminate'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Edge node.", str(err))
@@ -75,14 +75,14 @@ def recreate():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('project_prepare'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('project_prepare'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Edge node.", str(err))
         sys.exit(1)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('edge_configure'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('edge_configure'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Edge node.", str(err))
diff --git a/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py b/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
index e1d5184..4f8483b 100644
--- a/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
+++ b/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
@@ -58,7 +58,7 @@ if __name__ == "__main__":
                 os.environ['conf_resource'], reupload_config['resource_id'],
                 reupload_config['os_user'],  reupload_config['keyfile'],
                 json.dumps(reupload_config['additional_config']))
-            subprocess.run("~/scripts/{}.py {}".format('common_reupload_key', params), shell=True)
+            subprocess.run("~/scripts/{}.py {}".format('common_reupload_key', params), shell=True, check=True)
         except Exception as err:
             traceback.print_exc()
             raise Exception
diff --git a/infrastructure-provisioning/src/rstudio/fabfile.py b/infrastructure-provisioning/src/rstudio/fabfile.py
index 205e1d2..4d20c18 100644
--- a/infrastructure-provisioning/src/rstudio/fabfile.py
+++ b/infrastructure-provisioning/src/rstudio/fabfile.py
@@ -45,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -53,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('rstudio_configure', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('rstudio_configure', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -68,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -83,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -99,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -116,9 +116,9 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine-service':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True, check=True)
         elif os.environ['conf_resource'] == 'dataengine':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True, check=True)
     except Exception as err:
         append_result("Failed configuring analytical tool on Notebook node.", str(err))
         sys.exit(1)
@@ -134,7 +134,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -151,7 +151,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -168,7 +168,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -185,7 +185,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -202,7 +202,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -219,7 +219,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
@@ -235,7 +235,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/infrastructure-provisioning/src/ssn/fabfile.py b/infrastructure-provisioning/src/ssn/fabfile.py
index 4dee3c4..79243cd 100644
--- a/infrastructure-provisioning/src/ssn/fabfile.py
+++ b/infrastructure-provisioning/src/ssn/fabfile.py
@@ -39,14 +39,14 @@ def run(ctx):
     ssn_config = dict()
     ssn_config['ssn_unique_index'] = str(uuid.uuid4())[:5]
     try:
-        subprocess.run("~/scripts/{}.py --ssn_unique_index {}".format('ssn_prepare', ssn_config['ssn_unique_index']), shell=True)
+        subprocess.run("~/scripts/{}.py --ssn_unique_index {}".format('ssn_prepare', ssn_config['ssn_unique_index']), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing SSN node.", str(err))
         sys.exit(1)
 
     try:
-        subprocess.run("~/scripts/{}.py --ssn_unique_index {}".format('ssn_configure', ssn_config['ssn_unique_index']), shell=True)
+        subprocess.run("~/scripts/{}.py --ssn_unique_index {}".format('ssn_configure', ssn_config['ssn_unique_index']), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring SSN node.", str(err))
@@ -61,7 +61,7 @@ def terminate(ctx):
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('ssn_terminate'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('ssn_terminate'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating SSN node.", str(err))
diff --git a/infrastructure-provisioning/src/ssn/scripts/backup.py b/infrastructure-provisioning/src/ssn/scripts/backup.py
index 651f4a5..dc6d91c 100644
--- a/infrastructure-provisioning/src/ssn/scripts/backup.py
+++ b/infrastructure-provisioning/src/ssn/scripts/backup.py
@@ -53,18 +53,18 @@ args = parser.parse_args()
 
 def backup_prepare():
     try:
-        subprocess.run('mkdir {}'.format(temp_folder), shell=True)
+        subprocess.run('mkdir {}'.format(temp_folder), shell=True, check=True)
         if args.configs != 'skip':
-            subprocess.run('mkdir -p {0}conf'.format(temp_folder), shell=True)
+            subprocess.run('mkdir -p {0}conf'.format(temp_folder), shell=True, check=True)
         if args.keys != 'skip':
-            subprocess.run('mkdir -p {}keys'.format(temp_folder), shell=True)
+            subprocess.run('mkdir -p {}keys'.format(temp_folder), shell=True, check=True)
         if args.certs != 'skip':
-            subprocess.run('mkdir -p {}certs'.format(temp_folder), shell=True)
+            subprocess.run('mkdir -p {}certs'.format(temp_folder), shell=True, check=True)
         if args.jars != 'skip':
-            subprocess.run('mkdir -p {}jars'.format(temp_folder), shell=True)
+            subprocess.run('mkdir -p {}jars'.format(temp_folder), shell=True, check=True)
         if args.logs:
-            subprocess.run('mkdir -p {}logs'.format(temp_folder), shell=True)
-            subprocess.run('mkdir -p {}logs/docker'.format(temp_folder), shell=True)
+            subprocess.run('mkdir -p {}logs'.format(temp_folder), shell=True, check=True)
+            subprocess.run('mkdir -p {}logs/docker'.format(temp_folder), shell=True, check=True)
     except Exception as err:
         append_result(error='Failed to create temp folder. {}'.format(str(err)))
         sys.exit(1)
@@ -77,10 +77,10 @@ def backup_configs():
             print('Skipped config backup.')
         elif args.configs == 'all':
             subprocess.run("find {0}{2} -name '*yml' -exec cp {3} {1}{2} \;".format(args.datalab_path, temp_folder, conf_folder,
-                                                                           "{}"), shell=True)
+                                                                           "{}"), shell=True, check=True)
         else:
             for conf_file in args.configs.split(','):
-                subprocess.run('cp {0}{2}{3} {1}{2}'.format(args.datalab_path, temp_folder, conf_folder, conf_file), shell=True)
+                subprocess.run('cp {0}{2}{3} {1}{2}'.format(args.datalab_path, temp_folder, conf_folder, conf_file), shell=True, check=True)
     except:
         append_result(error='Backup configs failed.')
         sys.exit(1)
@@ -92,10 +92,10 @@ def backup_keys():
         if args.keys == 'skip':
             print('Skipped keys backup.')
         elif args.keys == 'all':
-            subprocess.run('cp {0}* {1}keys'.format(keys_folder, temp_folder), shell=True)
+            subprocess.run('cp {0}* {1}keys'.format(keys_folder, temp_folder), shell=True, check=True)
         else:
             for key_file in args.keys.split(','):
-                subprocess.run('cp {0}{1} {2}keys'.format(keys_folder, key_file, temp_folder), shell=True)
+                subprocess.run('cp {0}{1} {2}keys'.format(keys_folder, key_file, temp_folder), shell=True, check=True)
     except:
         append_result(error='Backup keys failed.')
         sys.exit(1)
@@ -108,12 +108,12 @@ def backup_certs():
             print('Skipped certs backup.')
         elif args.certs == 'all':
             for cert in all_certs:
-                subprocess.run('sudo cp {0}{1} {2}certs'.format(certs_folder, cert, temp_folder), shell=True)
-                subprocess.run('sudo chown {0}:{0} {1}certs/{2} '.format(os_user, temp_folder, cert), shell=True)
+                subprocess.run('sudo cp {0}{1} {2}certs'.format(certs_folder, cert, temp_folder), shell=True, check=True)
+                subprocess.run('sudo chown {0}:{0} {1}certs/{2} '.format(os_user, temp_folder, cert), shell=True, check=True)
         else:
             for cert in args.certs.split(','):
-                subprocess.run('cp {0}{1} {2}certs'.format(certs_folder, cert, temp_folder), shell=True)
-                subprocess.run('sudo chown {0}:{0} {1}certs/{2} '.format(os_user, temp_folder, cert), shell=True)
+                subprocess.run('cp {0}{1} {2}certs'.format(certs_folder, cert, temp_folder), shell=True, check=True)
+                subprocess.run('sudo chown {0}:{0} {1}certs/{2} '.format(os_user, temp_folder, cert), shell=True, check=True)
     except:
         append_result(error='Backup certs failed.')
         sys.exit(1)
@@ -127,10 +127,10 @@ def backup_jars():
         elif args.jars == 'all':
             for root, dirs, files in os.walk('{0}{1}'.format(args.datalab_path, jars_folder)):
                 for service in dirs:
-                    subprocess.run('cp -RP {0}{1}{2}* {3}jars'.format(args.datalab_path, jars_folder, service, temp_folder), shell=True)
+                    subprocess.run('cp -RP {0}{1}{2}* {3}jars'.format(args.datalab_path, jars_folder, service, temp_folder), shell=True, check=True)
         else:
             for service in args.jars.split(','):
-                subprocess.run('cp -RP {0}{1}{2}* {3}jars'.format(args.datalab_path, jars_folder, service, temp_folder), shell=True)
+                subprocess.run('cp -RP {0}{1}{2}* {3}jars'.format(args.datalab_path, jars_folder, service, temp_folder), shell=True, check=True)
     except:
         append_result(error='Backup jars failed.')
         sys.exit(1)
@@ -145,7 +145,7 @@ def backup_database():
             with settings(hide('running')):
                 subprocess.run("mongodump --host {0} --port {1} --username {2} --password '{3}' --db={4} --archive={5}mongo.db" \
                     .format(data['mongo']['host'], data['mongo']['port'], data['mongo']['username'],
-                            data['mongo']['password'], data['mongo']['database'], temp_folder), shell=True)
+                            data['mongo']['password'], data['mongo']['database'], temp_folder), shell=True, check=True)
     except:
         append_result(error='Backup db failed.')
         sys.exit(1)
@@ -156,11 +156,11 @@ def backup_logs():
         print('Backup logs: {}'.format(args.logs))
         if args.logs:
             print('Backup DataLab logs')
-            subprocess.run('cp -R {0}* {1}logs'.format(datalab_logs_folder, temp_folder), shell=True)
+            subprocess.run('cp -R {0}* {1}logs'.format(datalab_logs_folder, temp_folder), shell=True, check=True)
             print('Backup docker logs')
             subprocess.run("sudo find {0} -name '*log' -exec cp {2} {1}logs/docker \;".format(docker_logs_folder, temp_folder,
-                                                                                     "{}"), shell=True)
-            subprocess.run('sudo chown -R {0}:{0} {1}logs/docker'.format(os_user, temp_folder), shell=True)
+                                                                                     "{}"), shell=True, check=True)
+            subprocess.run('sudo chown -R {0}:{0} {1}logs/docker'.format(os_user, temp_folder), shell=True, check=True)
     except:
         append_result(error='Backup logs failed.')
         print('Backup logs failed.')
@@ -170,7 +170,7 @@ def backup_logs():
 def backup_finalize():
     try:
         print('Compressing all files to archive...')
-        subprocess.run('cd {0} && tar -zcf {1} .'.format(temp_folder, dest_file), shell=True)
+        subprocess.run('cd {0} && tar -zcf {1} .'.format(temp_folder, dest_file), shell=True, check=True)
     except Exception as err:
         append_result(error='Compressing backup failed. {}'.format(str(err)))
         sys.exit(1)
@@ -178,7 +178,7 @@ def backup_finalize():
     try:
         print('Clear temp folder...')
         if temp_folder != '/':
-            subprocess.run('rm -rf {}'.format(temp_folder), shell=True)
+            subprocess.run('rm -rf {}'.format(temp_folder), shell=True, check=True)
     except Exception as err:
         append_result(error='Clear temp folder failed. {}'.format(str(err)))
         sys.exit(1)
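
One behavioural note on backup_prepare() above: plain mkdir exits non-zero when
the directory already exists, so with check=True a leftover temp folder now
aborts the backup, while the mkdir -p calls remain idempotent. A sketch of an
equivalent pure-Python approach -- os.makedirs with exist_ok=True is a
standard-library alternative, not what the patch uses:

    import os

    temp_folder = '/tmp/datalab_backup/'  # illustrative path
    for subdir in ('conf', 'keys', 'certs', 'jars', 'logs/docker'):
        # exist_ok=True mirrors 'mkdir -p': no error if the directory already exists
        os.makedirs(os.path.join(temp_folder, subdir), exist_ok=True)
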
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
index ccfb98a..70643c3 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
@@ -67,35 +67,35 @@ def prepare_config():
     try:
         with lcd('{}tmp/gitlab'.format(os.environ['conf_datalab_path'])):
             if os.path.exists('{}tmp/gitlab/gitlab.rb.bak'.format(os.environ['conf_datalab_path'])):
-                subprocess.run('cp gitlab.rb.bak gitlab.rb', shell=True)
+                subprocess.run('cp gitlab.rb.bak gitlab.rb', shell=True, check=True)
             else:
-                subprocess.run('cp gitlab.rb gitlab.rb.bak', shell=True)
+                subprocess.run('cp gitlab.rb gitlab.rb.bak', shell=True, check=True)
             if json.loads(os.environ['gitlab_ssl_enabled']):
-                subprocess.run('sed -i "s,EXTERNAL_URL,https://{}:443,g" gitlab.rb'.format(os.environ['instance_hostname']), shell=True)
-                subprocess.run('sed -i "s/.*NGINX_ENABLED/nginx[\'enable\'] = true/g" gitlab.rb', shell=True)
+                subprocess.run('sed -i "s,EXTERNAL_URL,https://{}:443,g" gitlab.rb'.format(os.environ['instance_hostname']), shell=True, check=True)
+                subprocess.run('sed -i "s/.*NGINX_ENABLED/nginx[\'enable\'] = true/g" gitlab.rb', shell=True, check=True)
                 subprocess.run(
                     'sed -i "s,.*NGINX_SSL_CERTIFICATE_KEY,nginx[\'ssl_certificate_key\'] = \'{}\',g" gitlab.rb'.format(
-                        os.environ['gitlab_ssl_certificate_key']), shell=True)
+                        os.environ['gitlab_ssl_certificate_key']), shell=True, check=True)
                 subprocess.run('sed -i "s,.*NGINX_SSL_CERTIFICATE,nginx[\'ssl_certificate\'] = \'{}\',g" gitlab.rb'.format(
-                    os.environ['gitlab_ssl_certificate']), shell=True)
+                    os.environ['gitlab_ssl_certificate']), shell=True, check=True)
                 subprocess.run('sed -i "s,.*NGINX_SSL_DHPARAMS.*,nginx[\'ssl_dhparam\'] = \'{}\',g" gitlab.rb'.format(
                     os.environ['gitlab_ssl_dhparams']))
                 if json.loads(os.environ['gitlab_https_redirect_enabled']):
-                    subprocess.run('sed -i "s,.*NGINX_REDIRECT_TO_HTTPS,nginx[\'redirect_http_to_https\'] = true,g" gitlab.rb', shell=True)
-                    subprocess.run('sed -i "s,.*NGINX_REDIRECT_PORT,nginx[\'redirect_http_to_https_port\'] = 80,g" gitlab.rb', shell=True)
+                    subprocess.run('sed -i "s,.*NGINX_REDIRECT_TO_HTTPS,nginx[\'redirect_http_to_https\'] = true,g" gitlab.rb', shell=True, check=True)
+                    subprocess.run('sed -i "s,.*NGINX_REDIRECT_PORT,nginx[\'redirect_http_to_https_port\'] = 80,g" gitlab.rb', shell=True, check=True)
             else:
-                subprocess.run('sed -i "s,EXTERNAL_URL,http://{},g" gitlab.rb'.format(os.environ['instance_hostname']), shell=True)
-
-            subprocess.run('sed -i "s/LDAP_HOST/{}/g" gitlab.rb'.format(os.environ['ldap_hostname']), shell=True)
-            subprocess.run('sed -i "s/LDAP_PORT/{}/g" gitlab.rb'.format(os.environ['ldap_port']), shell=True)
-            subprocess.run('sed -i "s/LDAP_UID/{}/g" gitlab.rb'.format(os.environ['ldap_uid']), shell=True)
-            subprocess.run('sed -i "s/LDAP_BIND_DN/{}/g" gitlab.rb'.format(os.environ['ldap_bind_dn']), shell=True)
-            subprocess.run("sed -i 's/LDAP_PASSWORD/{}/g' gitlab.rb".format(os.environ['ldap_password']), shell=True)
-            subprocess.run('sed -i "s/LDAP_BASE/{}/g" gitlab.rb'.format(os.environ['ldap_base']), shell=True)
-            subprocess.run("sed -i 's/LDAP_ATTR_USERNAME/{}/g' gitlab.rb".format(os.environ['ldap_attr_username']), shell=True)
-            subprocess.run("sed -i 's/LDAP_ATTR_EMAIL/{}/g' gitlab.rb".format(os.environ['ldap_attr_email']), shell=True)
-
-            subprocess.run("sed -i 's/GITLAB_ROOT_PASSWORD/{}/g' gitlab.rb".format(os.environ['gitlab_root_password']), shell=True)
+                subprocess.run('sed -i "s,EXTERNAL_URL,http://{},g" gitlab.rb'.format(os.environ['instance_hostname']), shell=True, check=True)
+
+            subprocess.run('sed -i "s/LDAP_HOST/{}/g" gitlab.rb'.format(os.environ['ldap_hostname']), shell=True, check=True)
+            subprocess.run('sed -i "s/LDAP_PORT/{}/g" gitlab.rb'.format(os.environ['ldap_port']), shell=True, check=True)
+            subprocess.run('sed -i "s/LDAP_UID/{}/g" gitlab.rb'.format(os.environ['ldap_uid']), shell=True, check=True)
+            subprocess.run('sed -i "s/LDAP_BIND_DN/{}/g" gitlab.rb'.format(os.environ['ldap_bind_dn']), shell=True, check=True)
+            subprocess.run("sed -i 's/LDAP_PASSWORD/{}/g' gitlab.rb".format(os.environ['ldap_password']), shell=True, check=True)
+            subprocess.run('sed -i "s/LDAP_BASE/{}/g" gitlab.rb'.format(os.environ['ldap_base']), shell=True, check=True)
+            subprocess.run("sed -i 's/LDAP_ATTR_USERNAME/{}/g' gitlab.rb".format(os.environ['ldap_attr_username']), shell=True, check=True)
+            subprocess.run("sed -i 's/LDAP_ATTR_EMAIL/{}/g' gitlab.rb".format(os.environ['ldap_attr_email']), shell=True, check=True)
+
+            subprocess.run("sed -i 's/GITLAB_ROOT_PASSWORD/{}/g' gitlab.rb".format(os.environ['gitlab_root_password']), shell=True, check=True)
         print('Initial config is ready.')
     except Exception as err:
         print('Failed to install gitlab. {}'.format(str(err)))
@@ -117,7 +117,7 @@ def install_gitlab():
 
         with lcd('{}tmp/gitlab'.format(os.environ['conf_datalab_path'])):
             conn.put('gitlab.rb', '/tmp/gitlab.rb')
-            subprocess.run('rm gitlab.rb', shell=True)
+            subprocess.run('rm gitlab.rb', shell=True, check=True)
         conn.sudo('rm /etc/gitlab/gitlab.rb')
         conn.sudo('mv /tmp/gitlab.rb /etc/gitlab/gitlab.rb')
 
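
Since values such as ldap_password are interpolated directly into sed
expressions above, characters like quotes, spaces or the chosen delimiter would
break the command -- and with check=True those breakages now fail loudly rather
than pass silently. shlex.quote from the standard library is one way to make
the shell-level interpolation safe; a hedged sketch with an illustrative value
(sed's own metacharacters inside the value remain a separate concern):

    import shlex
    import subprocess

    password = "p/ass 'word'"  # illustrative value with awkward characters
    expr = 's|LDAP_PASSWORD|{}|g'.format(password)
    # quote the whole sed expression so the shell sees it as one safe argument;
    # '|' is the delimiter here, so it must not occur in the value itself
    subprocess.run('sed -i {} gitlab.rb'.format(shlex.quote(expr)), shell=True, check=True)
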
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
index 551fc21..ec9faf0 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
@@ -93,7 +93,7 @@ def copy_ssn_libraries():
     try:
         conn.sudo('mkdir -p /usr/lib/python3.8/datalab/')
         conn.run('mkdir -p /tmp/datalab_libs/')
-        subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, host_string), shell=True)
+        subprocess.run('scp -i {} /usr/lib/python3.8/datalab/*.py {}:/tmp/datalab_libs/'.format(args.keyfile, host_string), shell=True, check=True)
         conn.run('chmod a+x /tmp/datalab_libs/*')
         conn.sudo('mv /tmp/datalab_libs/* /usr/lib/python3.8/datalab/')
         if exists('/usr/lib64'):
@@ -109,27 +109,27 @@ def configure_mongo(mongo_passwd, default_endpoint_name):
     try:
         if not exists("/lib/systemd/system/mongod.service"):
             if os.environ['conf_os_family'] == 'debian':
-                subprocess.run('sed -i "s/MONGO_USR/mongodb/g" /root/templates/mongod.service_template', shell=True)
+                subprocess.run('sed -i "s/MONGO_USR/mongodb/g" /root/templates/mongod.service_template', shell=True, check=True)
             elif os.environ['conf_os_family'] == 'redhat':
-                subprocess.run('sed -i "s/MONGO_USR/mongod/g" /root/templates/mongod.service_template', shell=True)
+                subprocess.run('sed -i "s/MONGO_USR/mongod/g" /root/templates/mongod.service_template', shell=True, check=True)
             subprocess.run('scp -i {} /root/templates/mongod.service_template {}:/tmp/mongod.service'.format(args.keyfile,
-                                                                                                    host_string), shell=True)
+                                                                                                    host_string), shell=True, check=True)
             conn.sudo('mv /tmp/mongod.service /lib/systemd/system/mongod.service')
             conn.sudo('systemctl daemon-reload')
             conn.sudo('systemctl enable mongod.service')
-        subprocess.run('sed -i "s|PASSWORD|{}|g" /root/scripts/resource_status.py'.format(mongo_passwd), shell=True)
+        subprocess.run('sed -i "s|PASSWORD|{}|g" /root/scripts/resource_status.py'.format(mongo_passwd), shell=True, check=True)
         subprocess.run('scp -i {} /root/scripts/resource_status.py {}:/tmp/resource_status.py'.format(args.keyfile,
-                                                                                             host_string), shell=True)
+                                                                                             host_string), shell=True, check=True)
         conn.sudo('mv /tmp/resource_status.py ' + os.environ['ssn_datalab_path'] + 'tmp/')
-        subprocess.run('sed -i "s|PASSWORD|{}|g" /root/scripts/configure_mongo.py'.format(mongo_passwd), shell=True)
+        subprocess.run('sed -i "s|PASSWORD|{}|g" /root/scripts/configure_mongo.py'.format(mongo_passwd), shell=True, check=True)
         subprocess.run('scp -i {} /root/scripts/configure_mongo.py {}:/tmp/configure_mongo.py'.format(args.keyfile,
-                                                                                             host_string), shell=True)
+                                                                                             host_string), shell=True, check=True)
         conn.sudo('mv /tmp/configure_mongo.py ' + args.datalab_path + 'tmp/')
         subprocess.run('scp -i {} /root/files/{}/mongo_roles.json {}:/tmp/mongo_roles.json'.format(args.keyfile,
                                                                                           args.cloud_provider,
-                                                                                          host_string), shell=True)
+                                                                                          host_string), shell=True, check=True)
         subprocess.run('scp -i {} /root/files/local_endpoint.json {}:/tmp/local_endpoint.json'.format(args.keyfile,
-                                                                                             host_string), shell=True)
+                                                                                             host_string), shell=True, check=True)
         conn.sudo('mv /tmp/mongo_roles.json ' + args.datalab_path + 'tmp/')
         conn.sudo('sed -i "s|DEF_ENDPOINT_NAME|{0}|g" /tmp/local_endpoint.json'.format(default_endpoint_name))
         conn.sudo('sed -i "s|CLOUD_PROVIDER|{0}|g" /tmp/local_endpoint.json'.format(
diff --git a/infrastructure-provisioning/src/ssn/scripts/docker_build.py b/infrastructure-provisioning/src/ssn/scripts/docker_build.py
index f018af3..010f257 100644
--- a/infrastructure-provisioning/src/ssn/scripts/docker_build.py
+++ b/infrastructure-provisioning/src/ssn/scripts/docker_build.py
@@ -49,24 +49,24 @@ else:
 
 def image_build(src_path, node):
     try:
-        if subprocess.run("cat /etc/lsb-release | grep DISTRIB_ID | awk -F '=' '{print $2}'", capture_output=True, shell=True).stdout == 'Ubuntu':
+        if subprocess.run("cat /etc/lsb-release | grep DISTRIB_ID | awk -F '=' '{print $2}'", capture_output=True, shell=True, check=True).stdout == 'Ubuntu':
             os_family = 'debian'
         else:
             os_family = 'redhat'
-        if subprocess.run("uname -r | awk -F '-' '{print $3}'", capture_output=True, shell=True).stdout == 'aws':
+        if subprocess.run("uname -r | awk -F '-' '{print $3}'", capture_output=True, shell=True, check=True).stdout == 'aws':
             cloud_provider = 'aws'
-        elif subprocess.run("uname -r | awk -F '-' '{print $3}'", capture_output=True, shell=True).stdout == 'azure':
+        elif subprocess.run("uname -r | awk -F '-' '{print $3}'", capture_output=True, shell=True, check=True).stdout == 'azure':
             cloud_provider = 'azure'
             if not exists('{}base/azure_auth.json'.format(src_path)):
-                subprocess.run('cp /home/datalab-user/keys/azure_auth.json {}base/azure_auth.json'.format(src_path), shell=True)
+                subprocess.run('cp /home/datalab-user/keys/azure_auth.json {}base/azure_auth.json'.format(src_path), shell=True, check=True)
         else:
             cloud_provider = 'gcp'
         subprocess.run('cd {2}; docker build --build-arg OS={0} --build-arg SRC_PATH= --file general/files/{1}/base_Dockerfile -t docker.datalab-base:latest .'.format(
-                    os_family, cloud_provider, src_path), shell=True)
+                    os_family, cloud_provider, src_path), shell=True, check=True)
         try:
             for i in range(len(node)):
                 subprocess.run('cd {3}; docker build --build-arg OS={0} --file general/files/{1}/{2}_Dockerfile -t docker.datalab-{2} .'.format(
-                            os_family, cloud_provider, node[i], src_path), shell=True)
+                            os_family, cloud_provider, node[i], src_path), shell=True, check=True)
         except Exception as err:
             print("Failed to build {} image".format(node[i]), str(err))
             raise Exception
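
A caveat on the comparisons above: with capture_output=True and no text=True,
.stdout is a bytes object that keeps its trailing newline, so a comparison
against the str 'Ubuntu' or 'aws' can never be true and the code always falls
through to the else branch. A sketch of a comparison that would match --
text=True and strip() are additions for illustration, not part of the patch:

    import subprocess

    result = subprocess.run("uname -r | awk -F '-' '{print $3}'",
                            capture_output=True, shell=True, check=True, text=True)
    if result.stdout.strip() == 'aws':  # text=True yields str; strip() drops the newline
        cloud_provider = 'aws'
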
diff --git a/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py b/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py
index bc34036..c83deab 100644
--- a/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py
+++ b/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py
@@ -55,10 +55,10 @@ def read_ini():
 
 def create_instance():
     try:
-        subprocess.run('mkdir -p ~/.aws', shell=True)
-        subprocess.run('touch ~/.aws/config', shell=True)
-        subprocess.run('echo "[default]" > ~/.aws/config', shell=True)
-        subprocess.run('echo "region = {}" >> ~/.aws/config'.format(os.environ['aws_region']), shell=True)
+        subprocess.run('mkdir -p ~/.aws', shell=True, check=True)
+        subprocess.run('touch ~/.aws/config', shell=True, check=True)
+        subprocess.run('echo "[default]" > ~/.aws/config', shell=True, check=True)
+        subprocess.run('echo "region = {}" >> ~/.aws/config'.format(os.environ['aws_region']), shell=True, check=True)
         ec2 = boto3.resource('ec2')
         security_groups_ids = []
         ami_id = get_ami_id(os.environ['aws_{}_ami_name'.format(os.environ['conf_os_family'])])
@@ -208,7 +208,7 @@ if __name__ == "__main__":
 
         # Main script for configure gitlab
         try:
-            subprocess.run('{0}/{1}.py {2}'.format(head, 'configure_gitlab', params), shell=True)
+            subprocess.run('{0}/{1}.py {2}'.format(head, 'configure_gitlab', params), shell=True, check=True)
         except Exception as err:
             print('Failed to configure gitlab. {}'.format(str(err)))
             terminate_gitlab()
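
The mkdir/touch/echo sequence in create_instance() above can also be written
without a shell: composing ~/.aws/config from Python replaces four subprocess
calls with one file write. A sketch under the same assumption that the region
comes from the aws_region environment variable:

    import os

    aws_dir = os.path.expanduser('~/.aws')
    os.makedirs(aws_dir, exist_ok=True)
    with open(os.path.join(aws_dir, 'config'), 'w') as config:
        # same content the echo commands above produce
        config.write('[default]\nregion = {}\n'.format(os.environ['aws_region']))
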
diff --git a/infrastructure-provisioning/src/ssn/scripts/restore.py b/infrastructure-provisioning/src/ssn/scripts/restore.py
index 21fd024..9cb9a98 100644
--- a/infrastructure-provisioning/src/ssn/scripts/restore.py
+++ b/infrastructure-provisioning/src/ssn/scripts/restore.py
@@ -74,15 +74,15 @@ def restore_prepare():
                 print("Use folder path '{}' in --file key".format(temp_folder))
                 raise Exception
             print("Backup acrhive will be unpacked to: {}".format(temp_folder))
-            subprocess.run("mkdir {}".format(temp_folder), shell=True)
-            subprocess.run("tar -xf {0} -C {1}".format(backup_file, temp_folder), shell=True)
+            subprocess.run("mkdir {}".format(temp_folder), shell=True, check=True)
+            subprocess.run("tar -xf {0} -C {1}".format(backup_file, temp_folder), shell=True, check=True)
         elif os.path.isdir(backup_file):
             temp_folder = backup_file
         else:
             print("Please, specify file or folder. Try --help for more details.")
             raise Exception
         print("Backup acrhive: {} contains following files (exclude logs):".format(backup_file))
-        subprocess.run("find {} -not -name '*log'".format(temp_folder), shell=True)
+        subprocess.run("find {} -not -name '*log'".format(temp_folder), shell=True, check=True)
     except Exception as err:
         print("Failed to open backup.{}".format(str(err)))
         sys.exit(1)
@@ -91,14 +91,14 @@ def restore_prepare():
         if ask("Maybe you want to create backup of existing configuration before restoring?"):
             with settings(hide('everything')):
                 print("Creating new backup...")
-                subprocess.run("python3 backup.py --configs all --keys all --certs all --jar all --db", shell=True)
+                subprocess.run("python3 backup.py --configs all --keys all --certs all --jar all --db", shell=True, check=True)
     except:
         print("Failed to create new backup.")
         sys.exit(1)
 
     try:
         if ask("Stop all services before restoring?"):
-            subprocess.run("sudo supervisorctl stop all", shell=True)
+            subprocess.run("sudo supervisorctl stop all", shell=True, check=True)
         else:
             raise Exception
     except:
@@ -131,14 +131,14 @@ def restore_configs():
                         destfile = "{0}{1}{2}".format(args.datalab_path, conf_folder, filename)
                         if not filecmp.cmp(backupfile, destfile):
                             if ask("Config {} was changed, rewrite it?".format(filename)):
-                                subprocess.run("cp -f {0} {1}".format(backupfile, destfile), shell=True)
+                                subprocess.run("cp -f {0} {1}".format(backupfile, destfile), shell=True, check=True)
                             else:
                                 print("Config {} was skipped.".format(destfile))
                         else:
                             print("Config {} was not changed. Skipped.".format(filename))
                     else:
                         print("Config {} does not exist. Creating.".format(filename))
-                        subprocess.run("cp {0}{1}{2} {3}{1}{2}".format(temp_folder, conf_folder, filename, args.datalab_path), shell=True)
+                        subprocess.run("cp {0}{1}{2} {3}{1}{2}".format(temp_folder, conf_folder, filename, args.datalab_path), shell=True, check=True)
     except:
         print("Restore configs failed.")
 
@@ -165,14 +165,14 @@ def restore_keys():
                         print("Key {} already exist.".format(filename))
                         if not filecmp.cmp("{0}keys/{1}".format(temp_folder, filename), "{0}{1}".format(keys_folder, filename)):
                             if ask("Key {} was changed, rewrite it?".format(filename)):
-                                subprocess.run("cp -f {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename), shell=True)
+                                subprocess.run("cp -f {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename), shell=True, check=True)
                             else:
                                 print("Key {} was skipped.".format(filename))
                         else:
                             print("Key {} was not changed. Skipped.".format(filename))
                     else:
                         print("Key {} does not exist. Creating.".format(filename))
-                        subprocess.run("cp {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename), shell=True)
+                        subprocess.run("cp {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename), shell=True, check=True)
     except:
         print("Restore keys failed.")
 
@@ -199,16 +199,16 @@ def restore_certs():
                         print("Cert {} already exist.".format(filename))
                         if not filecmp.cmp("{0}certs/{1}".format(temp_folder, filename), "{0}{1}".format(certs_folder, filename)):
                             if ask("Cert {} was changed, rewrite it?".format(filename)):
-                                subprocess.run("sudo cp -f {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename), shell=True)
-                                subprocess.run("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename), shell=True)
+                                subprocess.run("sudo cp -f {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename), shell=True, check=True)
+                                subprocess.run("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename), shell=True, check=True)
                             else:
                                 print("Cert {} was skipped.".format(filename))
                         else:
                             print("Cert {} was not changed. Skipped.".format(filename))
                     else:
                         print("Cert {} does not exist. Creating.".format(filename))
-                        subprocess.run("sudo cp {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename), shell=True)
-                        subprocess.run("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename), shell=True)
+                        subprocess.run("sudo cp {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename), shell=True, check=True)
+                        subprocess.run("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename), shell=True, check=True)
     except:
         print("Restore certs failed.")
 
@@ -239,7 +239,7 @@ def restore_jars():
                                 destfile = "{0}{1}{2}/{3}".format(args.datalab_path, jars_folder, service, filename)
                                 if not filecmp.cmp(backupfile, destfile):
                                     if ask("Jar {} was changed, rewrite it?".format(filename)):
-                                        subprocess.run("cp -fP {0} {1}".format(backupfile, destfile), shell=True)
+                                        subprocess.run("cp -fP {0} {1}".format(backupfile, destfile), shell=True, check=True)
                                     else:
                                         print("Jar {} was skipped.".format(destfile))
                                 else:
@@ -247,7 +247,7 @@ def restore_jars():
                             else:
                                 print("Jar {} does not exist. Creating.".format(filename))
                                 subprocess.run("cp -P {0}jars/{1}/{2} {3}{4}{1}".format(temp_folder, service, filename,
-                                                                               args.datalab_path, jars_folder), shell=True)
+                                                                               args.datalab_path, jars_folder), shell=True, check=True)
     except:
         print("Restore jars failed.")
 
@@ -266,7 +266,7 @@ def restore_database():
                     print("Restoring database from backup")
                     subprocess.run("mongorestore --drop --host {0} --port {1} --archive={2}/mongo.db --username {3} --password '{4}' --authenticationDatabase={5}" \
                             .format(data['mongo']['host'], data['mongo']['port'], temp_folder,
-                                    data['mongo']['username'], data['mongo']['password'], data['mongo']['database']), shell=True)
+                                    data['mongo']['username'], data['mongo']['password'], data['mongo']['database']), shell=True, check=True)
         else:
             print("Restore database was skipped.")
     except:
@@ -276,13 +276,13 @@ def restore_database():
 def restore_finalize():
     try:
         if ask("Start all services after restoring?"):
-            subprocess.run("sudo supervisorctl start all", shell=True)
+            subprocess.run("sudo supervisorctl start all", shell=True, check=True)
     except:
         print("Failed to start all services.")
 
     try:
         if ask("Clean temporary folder {}?".format(temp_folder)) and temp_folder != "/":
-            subprocess.run("rm -rf {}".format(temp_folder), shell=True)
+            subprocess.run("rm -rf {}".format(temp_folder), shell=True, check=True)
     except Exception as err:
         print("Clear temp folder failed. {}".format(str(err)))
 
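
mongorestore above receives the password through a shell-interpolated string;
passing an argument list and dropping shell=True keeps the password out of
shell parsing entirely (it remains visible in the process list, as before). A
hedged sketch with illustrative values:

    import subprocess

    mongo = {'host': 'localhost', 'port': '27017', 'username': 'admin',
             'password': 'secret', 'database': 'datalab'}  # illustrative values
    subprocess.run(['mongorestore', '--drop',
                    '--host', mongo['host'], '--port', mongo['port'],
                    '--username', mongo['username'], '--password', mongo['password'],
                    '--authenticationDatabase', mongo['database'],
                    '--archive=/tmp/backup/mongo.db'],  # illustrative archive path
                   check=True)  # no shell=True: each argument is passed verbatim
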
diff --git a/infrastructure-provisioning/src/superset/fabfile.py b/infrastructure-provisioning/src/superset/fabfile.py
index 5880053..b5b4e5d 100644
--- a/infrastructure-provisioning/src/superset/fabfile.py
+++ b/infrastructure-provisioning/src/superset/fabfile.py
@@ -45,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -53,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('superset_configure', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('superset_configure', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -68,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -83,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -99,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -115,7 +115,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -132,7 +132,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -149,7 +149,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -168,7 +168,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/infrastructure-provisioning/src/tensor-rstudio/fabfile.py b/infrastructure-provisioning/src/tensor-rstudio/fabfile.py
index f490177..b6a5199 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/fabfile.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/fabfile.py
@@ -45,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -53,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('tensor-rstudio_configure', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('tensor-rstudio_configure', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -68,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -83,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -99,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -117,7 +117,7 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring dataengine on Notebook node.", str(err))
@@ -134,7 +134,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -151,7 +151,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -168,7 +168,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -185,7 +185,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -202,7 +202,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -219,7 +219,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
@@ -235,7 +235,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/infrastructure-provisioning/src/tensor/fabfile.py b/infrastructure-provisioning/src/tensor/fabfile.py
index 4d47869..8eec8e3 100644
--- a/infrastructure-provisioning/src/tensor/fabfile.py
+++ b/infrastructure-provisioning/src/tensor/fabfile.py
@@ -45,7 +45,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -53,7 +53,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('tensor_configure', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('tensor_configure', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -68,7 +68,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -83,7 +83,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -99,7 +99,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -117,7 +117,7 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring dataengine on Notebook node.", str(err))
@@ -134,7 +134,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -151,7 +151,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -168,7 +168,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -185,7 +185,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -202,7 +202,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -219,7 +219,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
@@ -235,7 +235,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/infrastructure-provisioning/src/zeppelin/fabfile.py b/infrastructure-provisioning/src/zeppelin/fabfile.py
index 512486c..bae1fbc 100644
--- a/infrastructure-provisioning/src/zeppelin/fabfile.py
+++ b/infrastructure-provisioning/src/zeppelin/fabfile.py
@@ -44,7 +44,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('common_prepare_notebook', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed preparing Notebook node.", str(err))
@@ -52,7 +52,7 @@ def run():
 
     try:
         params = "--uuid {}".format(notebook_config['uuid'])
-        subprocess.run("~/scripts/{}.py {}".format('zeppelin_configure', params), shell=True)
+        subprocess.run("~/scripts/{}.py {}".format('zeppelin_configure', params), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring Notebook node.", str(err))
@@ -67,7 +67,7 @@ def terminate():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed terminating Notebook node.", str(err))
@@ -82,7 +82,7 @@ def stop():
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     try:
-        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_stop_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed stopping Notebook node.", str(err))
@@ -98,7 +98,7 @@ def start():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_start_notebook'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed starting Notebook node.", str(err))
@@ -115,9 +115,9 @@ def configure():
 
     try:
         if os.environ['conf_resource'] == 'dataengine-service':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine-service'), shell=True, check=True)
         elif os.environ['conf_resource'] == 'dataengine':
-            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True)
+            subprocess.run("~/scripts/{}.py".format('common_notebook_configure_dataengine'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed configuring analytical tool on Notebook node.", str(err))
@@ -134,7 +134,7 @@ def install_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_install_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed installing additional libs for Notebook node.", str(err))
@@ -151,7 +151,7 @@ def list_libs():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_list_libs'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed get available libraries for notebook node.", str(err))
@@ -168,7 +168,7 @@ def git_creds():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_git_creds'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to manage git credentials for notebook node.", str(err))
@@ -185,7 +185,7 @@ def create_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_create_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -202,7 +202,7 @@ def terminate_image():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('common_terminate_notebook_image'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to create image from notebook node.", str(err))
@@ -219,7 +219,7 @@ def reconfigure_spark():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_reconfigure_spark'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
@@ -235,7 +235,7 @@ def check_inactivity():
                         filename=local_log_filepath)
 
     try:
-        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True)
+        subprocess.run("~/scripts/{}.py".format('notebook_inactivity_check'), shell=True, check=True)
     except Exception as err:
         traceback.print_exc()
         append_result("Failed to check inactivity status.", str(err))
diff --git a/integration-tests/examples/copy_files.py b/integration-tests/examples/copy_files.py
index 66f56ba..6f9b99f 100644
--- a/integration-tests/examples/copy_files.py
+++ b/integration-tests/examples/copy_files.py
@@ -39,7 +39,7 @@ dataset_file = ['airports.csv', 'carriers.csv', '2008.csv.bz2']
 def download_dataset():
     try:
         for f in dataset_file:
-            subprocess.run('wget http://stat-computing.org/dataexpo/2009/{0} -O /tmp/{0}'.format(f), shell=True)
+            subprocess.run('wget http://stat-computing.org/dataexpo/2009/{0} -O /tmp/{0}'.format(f), shell=True, check=True)
     except Exception as err:
         print('Failed to download test dataset', str(err))
         sys.exit(1)
@@ -47,7 +47,7 @@ def download_dataset():
 def upload_aws():
     try:
         for f in dataset_file:
-            subprocess.run('aws s3 cp /tmp/{0} s3://{1}/{2}_dataset/ --sse AES256'.format(f, args.storage, args.notebook), shell=True)
+            subprocess.run('aws s3 cp /tmp/{0} s3://{1}/{2}_dataset/ --sse AES256'.format(f, args.storage, args.notebook), shell=True, check=True)
     except Exception as err:
         print('Failed to upload test dataset to bucket', str(err))
         sys.exit(1)
@@ -90,7 +90,7 @@ def upload_azure_blob():
 def upload_gcp():
     try:
         for f in dataset_file:
-            subprocess.run('sudo gsutil -m cp /tmp/{0} gs://{1}/{2}_dataset/'.format(f, args.storage, args.notebook), shell=True)
+            subprocess.run('sudo gsutil -m cp /tmp/{0} gs://{1}/{2}_dataset/'.format(f, args.storage, args.notebook), shell=True, check=True)
     except Exception as err:
         print('Failed to upload test dataset to bucket', str(err))
         sys.exit(1)
diff --git a/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py b/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py
index af038af..c0fc31e 100644
--- a/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py
+++ b/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py
@@ -39,17 +39,17 @@ args = parser.parse_args()
 
 def prepare_templates():
     try:
-        subprocess.run('/bin/bash -c "source /etc/profile && wget http://files.fast.ai/data/dogscats.zip -O /tmp/dogscats.zip"', shell=True)
-        subprocess.run('unzip -q /tmp/dogscats.zip -d /tmp', shell=True)
-        subprocess.run('/bin/bash -c "mkdir -p /home/{0}/{1}"'.format(args.os_user, "{test,train}"), shell=True)
-        subprocess.run('mv /tmp/dogscats/test1/* /home/{0}/test'.format(args.os_user), shell=True)
-        subprocess.run('/bin/bash -c "mv /tmp/dogscats/valid/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True)
-        subprocess.run('/bin/bash -c "mv /tmp/dogscats/train/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True)
+        subprocess.run('/bin/bash -c "source /etc/profile && wget http://files.fast.ai/data/dogscats.zip -O /tmp/dogscats.zip"', shell=True, check=True)
+        subprocess.run('unzip -q /tmp/dogscats.zip -d /tmp', shell=True, check=True)
+        subprocess.run('/bin/bash -c "mkdir -p /home/{0}/{1}"'.format(args.os_user, "{test,train}"), shell=True, check=True)
+        subprocess.run('mv /tmp/dogscats/test1/* /home/{0}/test'.format(args.os_user), shell=True, check=True)
+        subprocess.run('/bin/bash -c "mv /tmp/dogscats/valid/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True, check=True)
+        subprocess.run('/bin/bash -c "mv /tmp/dogscats/train/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True, check=True)
     except Exception as err:
         print('Failed to download/unpack image dataset!', str(err))
         sys.exit(1)
-    subprocess.run('mkdir -p /home/{0}/logs'.format(args.os_user), shell=True)
-    subprocess.run('mv /tmp/deeplearning /home/{0}/test_templates'.format(args.os_user), shell=True)
+    subprocess.run('mkdir -p /home/{0}/logs'.format(args.os_user), shell=True, check=True)
+    subprocess.run('mv /tmp/deeplearning /home/{0}/test_templates'.format(args.os_user), shell=True, check=True)
 
 def get_storage():
     storages = {"aws": args.storage,
@@ -70,7 +70,7 @@ def prepare_ipynb(kernel_name, template_path, ipynb_name):
 
 def run_ipynb(ipynb_name):
     subprocess.run('export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64:/usr/lib64/openmpi/lib; ' \
-            'jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name), shell=True)
+            'jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name), shell=True, check=True)
 
 def run_tensor():
     interpreters = ['pyspark_local']
diff --git a/integration-tests/examples/scenario_jupyter/jupyter_tests.py b/integration-tests/examples/scenario_jupyter/jupyter_tests.py
index ce30ebd..85aec6b 100644
--- a/integration-tests/examples/scenario_jupyter/jupyter_tests.py
+++ b/integration-tests/examples/scenario_jupyter/jupyter_tests.py
@@ -37,7 +37,7 @@ args = parser.parse_args()
 
 
 def prepare_templates():
-    subprocess.run('mv /tmp/jupyter /home/{0}/test_templates'.format(args.os_user), shell=True)
+    subprocess.run('mv /tmp/jupyter /home/{0}/test_templates'.format(args.os_user), shell=True, check=True)
 
 def get_storage():
     storages = {"aws": args.storage,
@@ -59,7 +59,7 @@ def prepare_ipynb(kernel_name, template_path, ipynb_name):
         f.write(text)
 
 def run_ipynb(ipynb_name):
-    subprocess.run('jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name), shell=True)
+    subprocess.run('jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name), shell=True, check=True)
 
 def run_pyspark():
     interpreters = ['pyspark_local', 'pyspark_' + args.cluster_name]
diff --git a/integration-tests/examples/scenario_rstudio/rstudio_tests.py b/integration-tests/examples/scenario_rstudio/rstudio_tests.py
index 60b1519..6193b85 100644
--- a/integration-tests/examples/scenario_rstudio/rstudio_tests.py
+++ b/integration-tests/examples/scenario_rstudio/rstudio_tests.py
@@ -37,7 +37,7 @@ args = parser.parse_args()
 
 
 def prepare_templates():
-    subprocess.run('mv /tmp/rstudio /home/{0}/test_templates'.format(args.os_user), shell=True)
+    subprocess.run('mv /tmp/rstudio /home/{0}/test_templates'.format(args.os_user), shell=True, check=True)
 
 def get_storage():
     storages = {"aws": args.storage,
@@ -65,13 +65,13 @@ def prepare_rscript(template_path, rscript_name, kernel='remote'):
         f.write(text)
 
 def enable_local_kernel():
-    subprocess.run("sed -i 's/^master/#master/' /home/{0}/.Rprofile".format(args.os_user), shell=True)
-    subprocess.run('''sed -i "s/^/#/g" /home/{0}/.Renviron'''.format(args.os_user), shell=True)
-    subprocess.run('''sed -i "/\/opt\/spark\//s/#//g" /home/{0}/.Renviron'''.format(args.os_user), shell=True)
-    subprocess.run('rm -f metastore_db/db* derby.log', shell=True)
+    subprocess.run("sed -i 's/^master/#master/' /home/{0}/.Rprofile".format(args.os_user), shell=True, check=True)
+    subprocess.run('''sed -i "s/^/#/g" /home/{0}/.Renviron'''.format(args.os_user), shell=True, check=True)
+    subprocess.run('''sed -i "/\/opt\/spark\//s/#//g" /home/{0}/.Renviron'''.format(args.os_user), shell=True, check=True)
+    subprocess.run('rm -f metastore_db/db* derby.log', shell=True, check=True)
 
 def run_rscript(rscript_name):
-    subprocess.run('R < /home/{0}/{1}.r --no-save'.format(args.os_user, rscript_name), shell=True)
+    subprocess.run('R < /home/{0}/{1}.r --no-save'.format(args.os_user, rscript_name), shell=True, check=True)
 
 
 if __name__ == "__main__":
diff --git a/integration-tests/examples/scenario_tensor/tensor_tests.py b/integration-tests/examples/scenario_tensor/tensor_tests.py
index 9cc2c71..7d5ac05 100644
--- a/integration-tests/examples/scenario_tensor/tensor_tests.py
+++ b/integration-tests/examples/scenario_tensor/tensor_tests.py
@@ -38,17 +38,17 @@ args = parser.parse_args()
 
 def prepare_templates():
     try:
-        subprocess.run('/bin/bash -c "source /etc/profile && wget http://files.fast.ai/data/dogscats.zip -O /tmp/dogscats.zip"', shell=True)
-        subprocess.run('unzip -q /tmp/dogscats.zip -d /tmp', shell=True)
-        subprocess.run('/bin/bash -c "mkdir -p /home/{0}/{1}"'.format(args.os_user, "{test,train}"), shell=True)
-        subprocess.run('mv /tmp/dogscats/test1/* /home/{0}/test'.format(args.os_user), shell=True)
-        subprocess.run('/bin/bash -c "mv /tmp/dogscats/valid/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True)
-        subprocess.run('/bin/bash -c "mv /tmp/dogscats/train/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True)
+        subprocess.run('/bin/bash -c "source /etc/profile && wget http://files.fast.ai/data/dogscats.zip -O /tmp/dogscats.zip"', shell=True, check=True)
+        subprocess.run('unzip -q /tmp/dogscats.zip -d /tmp', shell=True, check=True)
+        subprocess.run('/bin/bash -c "mkdir -p /home/{0}/{1}"'.format(args.os_user, "{test,train}"), shell=True, check=True)
+        subprocess.run('mv /tmp/dogscats/test1/* /home/{0}/test'.format(args.os_user), shell=True, check=True)
+        subprocess.run('/bin/bash -c "mv /tmp/dogscats/valid/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True, check=True)
+        subprocess.run('/bin/bash -c "mv /tmp/dogscats/train/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user), shell=True, check=True)
     except Exception as err:
         print('Failed to download/unpack image dataset!', str(err))
         sys.exit(1)
-    subprocess.run('mkdir -p /home/{0}/logs'.format(args.os_user), shell=True)
-    subprocess.run('mv /tmp/tensor /home/{0}/test_templates'.format(args.os_user), shell=True)
+    subprocess.run('mkdir -p /home/{0}/logs'.format(args.os_user), shell=True, check=True)
+    subprocess.run('mv /tmp/tensor /home/{0}/test_templates'.format(args.os_user), shell=True, check=True)
 
 def get_storage():
     storages = {"aws": args.storage,
@@ -69,7 +69,7 @@ def prepare_ipynb(kernel_name, template_path, ipynb_name):
 
 def run_ipynb(ipynb_name):
     subprocess.run('export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; ' \
-            'jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name), shell=True)
+            'jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name), shell=True, check=True)
 
 def run_tensor():
     interpreters = ['pyspark_local']
diff --git a/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py b/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py
index 0579e55..f4efdfa 100644
--- a/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py
+++ b/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py
@@ -38,7 +38,7 @@ args = parser.parse_args()
 
 
 def prepare_templates():
-    subprocess.run('mv /tmp/zeppelin /home/{0}/test_templates'.format(args.os_user), shell=True)
+    subprocess.run('mv /tmp/zeppelin /home/{0}/test_templates'.format(args.os_user), shell=True, check=True)
 
 def get_storage():
     storages = {"aws": args.storage,
@@ -52,7 +52,7 @@ def get_storage():
 
 def get_note_status(note_id, notebook_ip):
     running = False
-    subprocess.run('sleep 5', shell=True)
+    subprocess.run('sleep 5', shell=True, check=True)
     response = requests.get('http://{0}:8080/api/notebook/job/{1}'.format(notebook_ip, note_id))
     status = json.loads(response.content)
     for i in status.get('body'):
@@ -63,7 +63,7 @@ def get_note_status(note_id, notebook_ip):
             print('Error in notebook')
             sys.exit(1)
     if running:
-        subprocess.run('sleep 5', shell=True)
+        subprocess.run('sleep 5', shell=True, check=True)
         get_note_status(note_id, notebook_ip)
     else:
         return "OK"
@@ -113,7 +113,7 @@ def restart_interpreter(notebook_ip, interpreter):
         response = requests.put('http://{0}:8080/api/interpreter/setting/restart/{1}'.format(notebook_ip, id))
         status = json.loads(response.content)
         if status.get('status') == 'OK':
-            subprocess.run('sleep 5', shell=True)
+            subprocess.run('sleep 5', shell=True, check=True)
             return "OK"
         else:
             print('Failed to restart interpreter')
@@ -168,7 +168,7 @@ def run_spark():
 
 if __name__ == "__main__":
     try:
-        notebook_ip = subprocess.run('hostname -I', capture_output=True, shell=True)
+        notebook_ip = subprocess.run('hostname -I', capture_output=True, shell=True, check=True)
         prepare_templates()
         run_pyspark()
         run_sparkr()
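
One caveat in the hunk above (a sketch, not part of the commit): with
capture_output=True, subprocess.run() returns a CompletedProcess object, so
notebook_ip ends up holding the whole result object rather than the address
itself. Extracting the text would look roughly like this:

    import subprocess

    # run() returns a CompletedProcess; the captured output lives on .stdout
    # as bytes and still needs decoding and stripping to become an address.
    result = subprocess.run('hostname -I', capture_output=True, shell=True, check=True)
    notebook_ip = result.stdout.decode().strip().split()[0]  # first address, if several
    print(notebook_ip)
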


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org


[incubator-datalab] 02/02: [DATALAB-2091]: removed unnecessary check

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2091
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 3dcfc4ea0f8439b80625ef8555565ebe873a12ec
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Wed Feb 17 10:57:19 2021 +0200

    [DATALAB-2091]: removed unnecessary check
---
 infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
index 22694ec..6282be4 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
@@ -150,8 +150,6 @@ if __name__ == "__main__":
 
         try:
             subprocess.run("~/scripts/{}.py {}".format('create_ssh_user', params), shell=True, check=True)
-            #if subprocess.returncode != 0:
-             #   raise Exception
         except:
             traceback.print_exc()
             raise Exception
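
The removed check was redundant on two counts: check=True already raises
CalledProcessError on a non-zero exit, and the commented condition read
subprocess.returncode, an attribute that lives on the CompletedProcess returned
by run(), not on the module, so it could never have fired as written. A minimal
sketch of the equivalent behavior (the command is illustrative):

    import subprocess

    try:
        # check=True makes run() raise on failure, which is what the removed
        # 'if ... returncode != 0: raise Exception' lines tried to do by hand.
        subprocess.run("exit 1", shell=True, check=True)
    except subprocess.CalledProcessError as err:
        print('script failed with exit code', err.returncode)
        raise
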


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org