Posted to commits@datalab.apache.org by lf...@apache.org on 2021/10/11 15:00:18 UTC

[incubator-datalab] branch DATALAB-2409 updated (53efbf6 -> 5f4d257)

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a change to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git.


    from 53efbf6  [DATALAB-2409]: merged develop
     new 1638c1d  [DATALAB-2409]: replaced print with logging in all src/ .py scripts except in fabfiles and general/
     new 94919c3  [DATALAB-2409]: replaced print with logging in all general/scripts/azure .py scripts
     new d9593a5  [DATALAB-2409]: replaced print with logging in all general/scripts/gcp .py scripts
     new 5f4d257  [DATALAB-2409]: replaced print with logging in all general/scripts/os .py scripts

The 4 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
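
These commits move the provisioning scripts from ad-hoc print statements to a shared logger imported as "from datalab.logger import logging". The contents of that module are not part of this change set; the following is only a minimal sketch of what such a pre-configured logger module could look like, with the format string taken from the basicConfig calls removed in the diffs below and everything else assumed for illustration:

    # datalab/logger.py -- hypothetical sketch; the real module is not shown in this change set
    import logging
    import sys

    # configure the root logger once, so importing scripts need no per-script setup
    logging.basicConfig(
        format='%(levelname)-8s [%(asctime)s]  %(message)s',
        level=logging.INFO,
        stream=sys.stdout,
    )

    # consuming scripts then simply do: from datalab.logger import logging
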


Summary of changes:
 .../src/base/scripts/configure_keycloak.py         |  6 +-
 .../src/base/scripts/create_ssh_user.py            |  7 +-
 .../src/base/scripts/install_user_key.py           | 17 ++--
 .../src/dataengine/scripts/configure_dataengine.py | 51 ++++++------
 .../scripts/configure_deep_learning_node.py        | 57 ++++++-------
 .../src/edge/scripts/configure_http_proxy.py       |  5 +-
 .../edge/scripts/configure_nginx_reverse_proxy.py  | 16 +---
 .../src/edge/scripts/reupload_ssh_key.py           | 12 +--
 .../src/general/lib/aws/actions_lib.py             |  2 +-
 .../src/general/lib/aws/meta_lib.py                |  2 +-
 .../src/general/lib/azure/actions_lib.py           |  2 +-
 .../src/general/lib/azure/meta_lib.py              |  2 +-
 .../src/general/lib/gcp/actions_lib.py             |  2 +-
 .../src/general/lib/gcp/meta_lib.py                |  2 +-
 .../src/general/lib/os/debian/ssn_lib.py           |  1 +
 .../general/scripts/azure/common_collect_data.py   |  3 +-
 .../azure/common_create_datalake_directory.py      |  9 +-
 .../scripts/azure/common_create_instance.py        | 15 ++--
 .../scripts/azure/common_create_notebook_image.py  |  7 +-
 .../scripts/azure/common_create_security_group.py  |  9 +-
 .../scripts/azure/common_create_storage_account.py | 13 +--
 .../general/scripts/azure/common_create_subnet.py  |  9 +-
 .../scripts/azure/common_download_git_certfile.py  |  5 +-
 .../azure/common_notebook_configure_dataengine.py  | 15 +---
 .../scripts/azure/common_prepare_notebook.py       | 28 ++-----
 .../scripts/azure/common_remove_remote_kernels.py  |  5 +-
 .../general/scripts/azure/common_reupload_key.py   |  3 +-
 .../general/scripts/azure/common_start_notebook.py | 26 ++----
 .../general/scripts/azure/common_stop_notebook.py  | 22 ++---
 .../scripts/azure/common_terminate_notebook.py     | 21 ++---
 .../general/scripts/azure/dataengine_configure.py  | 41 +++------
 .../general/scripts/azure/dataengine_prepare.py    | 23 ++---
 .../src/general/scripts/azure/dataengine_start.py  | 18 ++--
 .../src/general/scripts/azure/dataengine_stop.py   | 17 ++--
 .../general/scripts/azure/dataengine_terminate.py  | 19 ++---
 .../scripts/azure/deeplearning_configure.py        | 46 ++++------
 .../src/general/scripts/azure/edge_configure.py    | 45 ++++------
 .../src/general/scripts/azure/edge_prepare.py      | 56 +++++--------
 .../src/general/scripts/azure/edge_start.py        | 23 ++---
 .../src/general/scripts/azure/edge_status.py       | 11 +--
 .../src/general/scripts/azure/edge_stop.py         | 14 +---
 .../src/general/scripts/azure/edge_terminate.py    | 46 +++++-----
 .../src/general/scripts/azure/jupyter_configure.py | 41 +++------
 .../general/scripts/azure/jupyterlab_configure.py  | 42 +++-------
 .../src/general/scripts/azure/project_prepare.py   | 46 ++++------
 .../src/general/scripts/azure/project_terminate.py | 54 +++++-------
 .../general/scripts/azure/rstudio_change_pass.py   |  7 +-
 .../src/general/scripts/azure/rstudio_configure.py | 46 ++++------
 .../src/general/scripts/azure/ssn_configure.py     | 59 +++++--------
 .../general/scripts/azure/ssn_create_datalake.py   |  9 +-
 .../general/scripts/azure/ssn_create_peering.py    |  5 +-
 .../scripts/azure/ssn_create_resource_group.py     |  7 +-
 .../src/general/scripts/azure/ssn_create_vpc.py    |  7 +-
 .../src/general/scripts/azure/ssn_prepare.py       | 29 ++-----
 .../src/general/scripts/azure/ssn_terminate.py     | 63 +++++++-------
 .../src/general/scripts/azure/tensor_configure.py  | 47 ++++-------
 .../general/scripts/azure/zeppelin_configure.py    | 41 +++------
 .../src/general/scripts/gcp/common_collect_data.py |  5 +-
 .../general/scripts/gcp/common_create_bucket.py    |  5 +-
 .../general/scripts/gcp/common_create_firewall.py  |  9 +-
 .../general/scripts/gcp/common_create_instance.py  |  5 +-
 .../general/scripts/gcp/common_create_nat_route.py |  5 +-
 .../scripts/gcp/common_create_notebook_image.py    |  9 +-
 .../scripts/gcp/common_create_service_account.py   | 15 ++--
 .../general/scripts/gcp/common_create_subnet.py    |  9 +-
 .../scripts/gcp/common_download_git_certfile.py    |  5 +-
 .../src/general/scripts/gcp/common_install_gpu.py  |  5 +-
 ...common_notebook_configure_dataengine-service.py | 15 +---
 .../gcp/common_notebook_configure_dataengine.py    | 15 +---
 .../general/scripts/gcp/common_prepare_notebook.py | 22 ++---
 .../src/general/scripts/gcp/common_reupload_key.py |  3 +-
 .../general/scripts/gcp/common_start_notebook.py   | 22 ++---
 .../general/scripts/gcp/common_stop_notebook.py    | 32 +++----
 .../scripts/gcp/common_terminate_notebook.py       | 29 +++----
 .../scripts/gcp/dataengine-service_configure.py    | 44 ++++------
 .../scripts/gcp/dataengine-service_create.py       |  5 +-
 .../scripts/gcp/dataengine-service_install_libs.py | 14 +---
 .../scripts/gcp/dataengine-service_list_libs.py    | 13 +--
 .../scripts/gcp/dataengine-service_prepare.py      | 15 +---
 .../scripts/gcp/dataengine-service_terminate.py    | 22 ++---
 .../general/scripts/gcp/dataengine_configure.py    | 41 +++------
 .../src/general/scripts/gcp/dataengine_prepare.py  | 21 ++---
 .../src/general/scripts/gcp/dataengine_start.py    | 16 +---
 .../src/general/scripts/gcp/dataengine_stop.py     | 15 +---
 .../general/scripts/gcp/dataengine_terminate.py    | 17 ++--
 .../general/scripts/gcp/deeplearning_configure.py  | 46 ++++------
 .../src/general/scripts/gcp/edge_configure.py      | 41 ++++-----
 .../general/scripts/gcp/edge_create_static_ip.py   | 13 +--
 .../src/general/scripts/gcp/edge_start.py          | 23 ++---
 .../src/general/scripts/gcp/edge_status.py         | 15 +---
 .../src/general/scripts/gcp/edge_stop.py           | 14 +---
 .../src/general/scripts/gcp/edge_terminate.py      | 32 +++----
 .../src/general/scripts/gcp/jupyter_configure.py   | 48 ++++-------
 .../general/scripts/gcp/jupyterlab_configure.py    | 48 ++++-------
 .../src/general/scripts/gcp/project_prepare.py     | 36 +++-----
 .../src/general/scripts/gcp/project_terminate.py   | 43 ++++------
 .../src/general/scripts/gcp/rstudio_configure.py   | 48 ++++-------
 .../rstudio_dataengine-service_create_configs.py   |  5 +-
 .../src/general/scripts/gcp/ssn_configure.py       | 52 +++++-------
 .../general/scripts/gcp/ssn_create_static_ip.py    | 12 +--
 .../src/general/scripts/gcp/ssn_create_vpc.py      |  7 +-
 .../src/general/scripts/gcp/ssn_finalize.py        |  3 +-
 .../src/general/scripts/gcp/ssn_prepare.py         | 22 ++---
 .../src/general/scripts/gcp/ssn_terminate.py       | 15 +---
 .../scripts/gcp/ssn_terminate_gcp_resources.py     | 45 +++++-----
 .../src/general/scripts/gcp/superset_configure.py  | 52 ++++--------
 .../scripts/gcp/tensor-rstudio_configure.py        | 50 ++++-------
 .../src/general/scripts/gcp/tensor_configure.py    | 48 ++++-------
 .../src/general/scripts/gcp/zeppelin_configure.py  | 42 ++++------
 .../general/scripts/os/common_clean_instance.py    | 17 ++--
 .../general/scripts/os/common_configure_proxy.py   |  5 +-
 .../scripts/os/common_configure_reverse_proxy.py   |  7 +-
 .../general/scripts/os/common_configure_spark.py   |  7 +-
 .../scripts/os/configure_proxy_for_docker.py       |  7 +-
 .../general/scripts/os/dataengine_install_libs.py  | 14 +---
 .../src/general/scripts/os/dataengine_list_libs.py | 14 +---
 .../scripts/os/dataengine_reconfigure_spark.py     | 18 ++--
 .../general/scripts/os/get_list_available_pkgs.py  |  5 +-
 .../general/scripts/os/install_additional_libs.py  | 17 ++--
 .../os/jupyter_install_dataengine_kernels.py       |  3 +-
 .../scripts/os/jupyterlab_container_start.py       |  7 +-
 .../src/general/scripts/os/manage_git_creds.py     |  9 +-
 .../src/general/scripts/os/notebook_git_creds.py   | 12 +--
 .../scripts/os/notebook_inactivity_check.py        | 13 +--
 .../general/scripts/os/notebook_install_libs.py    | 14 +---
 .../src/general/scripts/os/notebook_list_libs.py   | 14 +---
 .../scripts/os/notebook_reconfigure_spark.py       | 18 ++--
 .../os/rstudio_dataengine_create_configs.py        |  5 +-
 .../src/general/scripts/os/superset_start.py       |  7 +-
 .../os/tensor-rstudio_dataengine_create_configs.py |  5 +-
 .../os/tensor_install_dataengine_kernels.py        |  3 +-
 .../os/zeppelin_dataengine_create_configs.py       |  3 +-
 .../src/jupyter/scripts/configure_jupyter_node.py  | 51 ++++++------
 .../scripts/configure_jupyterlab_node.py           | 17 ++--
 .../src/project/scripts/configure_http_proxy.py    |  5 +-
 .../src/project/scripts/configure_nftables.py      |  5 +-
 .../scripts/configure_nginx_reverse_proxy.py       | 18 ++--
 .../src/project/scripts/reupload_ssh_key.py        | 12 +--
 .../src/rstudio/scripts/configure_rstudio_node.py  | 31 +++----
 .../src/ssn/scripts/backup.py                      | 37 +++++----
 .../src/ssn/scripts/configure_billing.py           | 11 +--
 .../src/ssn/scripts/configure_conf_file.py         |  9 +-
 .../src/ssn/scripts/configure_docker.py            | 31 +++----
 .../src/ssn/scripts/configure_gitlab.py            | 23 ++---
 .../src/ssn/scripts/configure_mongo.py             |  7 +-
 .../src/ssn/scripts/configure_ssn_node.py          | 49 +++++------
 .../src/ssn/scripts/configure_ui.py                | 26 +++---
 .../src/ssn/scripts/docker_build.py                |  3 +-
 .../src/ssn/scripts/gitlab_deploy.py               | 41 ++++-----
 .../src/ssn/scripts/resource_status.py             |  5 +-
 .../src/ssn/scripts/restore.py                     | 97 +++++++++++-----------
 .../src/ssn/scripts/upload_response_file.py        |  8 +-
 .../superset/scripts/configure_superset_node.py    | 15 ++--
 .../scripts/configure_tensor-rstudio_node.py       | 37 +++++----
 .../src/tensor/scripts/configure_tensor_node.py    | 43 +++++-----
 .../zeppelin/scripts/configure_zeppelin_node.py    | 51 ++++++------
 156 files changed, 1291 insertions(+), 1994 deletions(-)

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org


[incubator-datalab] 02/04: [DATALAB-2409]: replaced print with logging in all general/scripts/azure .py scripts

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 94919c31e428022564cbe6881dfb300c3050169d
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Oct 11 17:27:01 2021 +0300

    [DATALAB-2409]: replaced print with logging in all general/scripts/azure .py scripts
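
The pattern applied throughout this commit is the same as in the hunks below: the per-script logging.basicConfig(...) setup is dropped, and print calls become logging.info / logging.error through the shared logger import. A condensed before/after sketch, simplified from the diff that follows (the "after" half assumes the datalab package is on the Python path):

    # before (simplified): each script configured its own file handler and mixed print with logging
    import logging
    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                        level=logging.DEBUG,
                        filename='example.log')  # the real scripts build this path from os.environ values
    print('Generating infrastructure names and tags')

    # after: a single shared, pre-configured logger is imported instead
    from datalab.logger import logging
    logging.info('Generating infrastructure names and tags')
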
---
 .../general/scripts/azure/common_collect_data.py   |  3 +-
 .../azure/common_create_datalake_directory.py      |  9 ++--
 .../scripts/azure/common_create_instance.py        | 15 +++---
 .../scripts/azure/common_create_notebook_image.py  |  7 +--
 .../scripts/azure/common_create_security_group.py  |  9 ++--
 .../scripts/azure/common_create_storage_account.py | 13 ++---
 .../general/scripts/azure/common_create_subnet.py  |  9 ++--
 .../scripts/azure/common_download_git_certfile.py  |  5 +-
 .../azure/common_notebook_configure_dataengine.py  | 15 ++----
 .../scripts/azure/common_prepare_notebook.py       | 28 ++++------
 .../scripts/azure/common_remove_remote_kernels.py  |  5 +-
 .../general/scripts/azure/common_reupload_key.py   |  3 +-
 .../general/scripts/azure/common_start_notebook.py | 26 +++------
 .../general/scripts/azure/common_stop_notebook.py  | 22 +++-----
 .../scripts/azure/common_terminate_notebook.py     | 21 +++-----
 .../general/scripts/azure/dataengine_configure.py  | 41 ++++----------
 .../general/scripts/azure/dataengine_prepare.py    | 23 +++-----
 .../src/general/scripts/azure/dataengine_start.py  | 18 ++-----
 .../src/general/scripts/azure/dataengine_stop.py   | 17 ++----
 .../general/scripts/azure/dataengine_terminate.py  | 19 +++----
 .../scripts/azure/deeplearning_configure.py        | 46 ++++++----------
 .../src/general/scripts/azure/edge_configure.py    | 45 ++++++----------
 .../src/general/scripts/azure/edge_prepare.py      | 56 ++++++++-----------
 .../src/general/scripts/azure/edge_start.py        | 23 +++-----
 .../src/general/scripts/azure/edge_status.py       | 11 +---
 .../src/general/scripts/azure/edge_stop.py         | 14 ++---
 .../src/general/scripts/azure/edge_terminate.py    | 46 +++++++---------
 .../src/general/scripts/azure/jupyter_configure.py | 41 +++++---------
 .../general/scripts/azure/jupyterlab_configure.py  | 42 +++++----------
 .../src/general/scripts/azure/project_prepare.py   | 46 ++++++----------
 .../src/general/scripts/azure/project_terminate.py | 54 ++++++++-----------
 .../general/scripts/azure/rstudio_change_pass.py   |  7 +--
 .../src/general/scripts/azure/rstudio_configure.py | 46 ++++++----------
 .../src/general/scripts/azure/ssn_configure.py     | 59 ++++++++------------
 .../general/scripts/azure/ssn_create_datalake.py   |  9 ++--
 .../general/scripts/azure/ssn_create_peering.py    |  5 +-
 .../scripts/azure/ssn_create_resource_group.py     |  7 +--
 .../src/general/scripts/azure/ssn_create_vpc.py    |  7 +--
 .../src/general/scripts/azure/ssn_prepare.py       | 29 +++-------
 .../src/general/scripts/azure/ssn_terminate.py     | 63 ++++++++++------------
 .../src/general/scripts/azure/tensor_configure.py  | 47 ++++++----------
 .../general/scripts/azure/zeppelin_configure.py    | 41 +++++---------
 42 files changed, 379 insertions(+), 673 deletions(-)

diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_collect_data.py b/infrastructure-provisioning/src/general/scripts/azure/common_collect_data.py
index 11a62db..ee8eda7 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_collect_data.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_collect_data.py
@@ -29,6 +29,7 @@ import traceback
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -44,7 +45,7 @@ if __name__ == "__main__":
             data_instances = AzureMeta().get_list_instance_statuses(args.resource_group_name, data.get('host'))
             statuses['host'] = data_instances
         except:
-            print("Hosts JSON wasn't been provided")
+            logging.error("Hosts JSON wasn't been provided")
         with open('/root/result.json', 'w') as outfile:
             json.dump(statuses, outfile)
     except Exception as err:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_datalake_directory.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_datalake_directory.py
index 29ff0c2..f93260d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_datalake_directory.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_datalake_directory.py
@@ -26,6 +26,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--resource_group_name', type=str, default='')
@@ -42,10 +43,10 @@ if __name__ == "__main__":
         for datalake in AzureMeta().list_datalakes(args.resource_group_name):
             if args.datalake_name == datalake.tags["Name"]:
                 if AzureMeta().verify_datalake_directory(datalake.name, args.directory_name):
-                    print("Data Lake Store Directory '{}' already exist".format(args.directory_name))
+                    logging.info("Data Lake Store Directory '{}' already exist".format(args.directory_name))
                 else:
                     AzureActions().create_datalake_directory(datalake.name, args.directory_name)
-                    print("Data Lake Store Directory '{}' has been created".format(args.directory_name))
+                    logging.info("Data Lake Store Directory '{}' has been created".format(args.directory_name))
                     if args.ad_user != '':
                        AzureActions().set_user_permissions_to_datalake_directory(
                            datalake.name, '/{}'.format(args.directory_name), args.ad_user)
@@ -57,8 +58,8 @@ if __name__ == "__main__":
                                                                 ad_group=args.ad_group)
                 datalake_exists = True
         if not datalake_exists:
-            print("Requested Data Lake Store '{}' is missing".format(datalake.name))
+            logging.info("Requested Data Lake Store '{}' is missing".format(datalake.name))
             sys.exit(1)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_instance.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_instance.py
index 5ad8253..3f284d3 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_instance.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--instance_name', type=str, default='')
@@ -56,25 +57,25 @@ if __name__ == "__main__":
     if args.instance_name != '':
         try:
             if AzureMeta().get_instance(args.resource_group_name, args.instance_name):
-                print("REQUESTED INSTANCE {} ALREADY EXISTS".format(args.instance_name))
+                logging.info("REQUESTED INSTANCE {} ALREADY EXISTS".format(args.instance_name))
             else:
                 if args.public_ip_name != 'None':
                     if AzureMeta().get_static_ip(args.resource_group_name, args.public_ip_name):
-                        print("REQUESTED PUBLIC IP ADDRESS {} ALREADY EXISTS.".format(args.public_ip_name))
+                        logging.info("REQUESTED PUBLIC IP ADDRESS {} ALREADY EXISTS.".format(args.public_ip_name))
                         static_public_ip_address = AzureMeta().get_static_ip(
                             args.resource_group_name, args.public_ip_name).ip_address
                     else:
-                        print("Creating Static IP address {}".format(args.public_ip_name))
+                        logging.info("Creating Static IP address {}".format(args.public_ip_name))
                         static_public_ip_address = \
                             AzureActions().create_static_public_ip(args.resource_group_name, args.public_ip_name,
                                                                    args.region, args.instance_name,
                                                                    json.loads(args.tags))
                 if AzureMeta().get_network_interface(args.resource_group_name, args.network_interface_name):
-                    print("REQUESTED NETWORK INTERFACE {} ALREADY EXISTS.".format(args.network_interface_name))
+                    logging.info("REQUESTED NETWORK INTERFACE {} ALREADY EXISTS.".format(args.network_interface_name))
                     network_interface_id = AzureMeta().get_network_interface(args.resource_group_name,
                                                                              args.network_interface_name).id
                 else:
-                    print("Creating Network Interface {}".format(args.network_interface_name))
+                    logging.info("Creating Network Interface {}".format(args.network_interface_name))
                     network_interface_id = AzureActions().create_network_if(args.resource_group_name, args.vpc_name,
                                                                             args.subnet_name,
                                                                             args.network_interface_name, args.region,
@@ -86,7 +87,7 @@ if __name__ == "__main__":
                 if disk:
                     create_option = 'attach'
                     disk_id = disk.id
-                print("Creating instance {}".format(args.instance_name))
+                logging.info("Creating instance {}".format(args.instance_name))
                 AzureActions().create_instance(args.region, args.instance_size, args.service_base_name,
                                                args.instance_name, args.datalab_ssh_user_name, args.public_key,
                                                network_interface_id, args.resource_group_name, args.primary_disk_size,
@@ -95,7 +96,7 @@ if __name__ == "__main__":
                                                create_option, disk_id, args.instance_storage_account_type,
                                                args.image_type)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
     else:
         parser.print_help()
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py
index 294d7a8..a80c43e 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py
@@ -24,6 +24,7 @@
 import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
+from datalab.logger import logging
 import json
 import os
 import sys
@@ -76,7 +77,7 @@ if __name__ == "__main__":
 
         image = AzureMeta.get_image(image_conf['resource_group_name'], image_conf['full_image_name'])
         if image == '':
-            print('Creating image from existing notebook.')
+            logging.info('Creating image from existing notebook.')
             datalab.actions_lib.prepare_vm_for_image(True, image_conf['datalab_ssh_user'], instance_hostname,
                                                      keyfile_name)
             AzureActions.create_image_from_instance(image_conf['resource_group_name'],
@@ -84,7 +85,7 @@ if __name__ == "__main__":
                                                     os.environ['azure_region'],
                                                     image_conf['full_image_name'],
                                                     json.dumps(image_conf['tags']))
-            print("Image was successfully created.")
+            logging.info("Image was successfully created.")
             try:
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
@@ -103,7 +104,7 @@ if __name__ == "__main__":
                     .format(instance_hostname, image_conf['instance_name'], keyfile_name,
                             json.dumps(additional_config), image_conf['datalab_ssh_user'])
                 subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
-                print("Image was successfully created. It's name is {}".format(image_conf['full_image_name']))
+                logging.info("Image was successfully created. It's name is {}".format(image_conf['full_image_name']))
             except Exception as err:
                 AzureActions.remove_instance(image_conf['resource_group_name'], image_conf['instance_name'])
                 datalab.fab.append_result("Failed to create instance from image.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_security_group.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_security_group.py
index 05b6a8b..c206a76 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_security_group.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_security_group.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--resource_group_name', type=str, default='')
@@ -39,17 +40,17 @@ args = parser.parse_args()
 if __name__ == "__main__":
     try:
         if AzureMeta().get_security_group(args.resource_group_name, args.security_group_name):
-            print("REQUESTED SECURITY GROUP {} ALREADY EXISTS. Updating rules".format(args.security_group_name))
+            logging.info("REQUESTED SECURITY GROUP {} ALREADY EXISTS. Updating rules".format(args.security_group_name))
             security_group = AzureActions().create_security_group(args.resource_group_name, args.security_group_name,
                                                                   args.region, json.loads(args.tags),
                                                                   json.loads(args.list_rules), True)
         else:
-            print("Creating security group {}.".format(args.security_group_name))
+            logging.info("Creating security group {}.".format(args.security_group_name))
             security_group = AzureActions().create_security_group(args.resource_group_name, args.security_group_name,
                                                                   args.region, json.loads(args.tags),
                                                                   json.loads(args.list_rules))
-            print("SECURITY GROUP {} has been created".format(args.security_group_name))
+            logging.info("SECURITY GROUP {} has been created".format(args.security_group_name))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_storage_account.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_storage_account.py
index 04f47bd..17676b0 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_storage_account.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_storage_account.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--container_name', type=str, default='')
@@ -42,20 +43,20 @@ if __name__ == "__main__":
         for storage_account in AzureMeta().list_storage_accounts(args.resource_group_name):
             if account_tags["Name"] == storage_account.tags["Name"]:
                 check_account = True
-                print("REQUESTED STORAGE ACCOUNT {} ALREADY EXISTS".format(storage_account.name))
+                logging.info("REQUESTED STORAGE ACCOUNT {} ALREADY EXISTS".format(storage_account.name))
         if not check_account:
             account_name = id_generator().lower()
             check = AzureMeta().check_account_availability(account_name)
             if check.name_available:
-                print("Creating storage account {}.".format(account_name))
+                logging.info("Creating storage account {}.".format(account_name))
                 storage_account = AzureActions().create_storage_account(args.resource_group_name, account_name,
                                                                         args.region, account_tags)
                 blob_container = AzureActions().create_blob_container(args.resource_group_name, account_name,
                                                                       args.container_name)
-                print("STORAGE ACCOUNT {} has been created".format(account_name))
-                print("CONTAINER {} has been created".format(args.container_name))
+                logging.info("STORAGE ACCOUNT {} has been created".format(account_name))
+                logging.info("CONTAINER {} has been created".format(args.container_name))
             else:
-                print("STORAGE ACCOUNT with name {0} could not be created. {1}".format(account_name, check.message))
+                logging.info("STORAGE ACCOUNT with name {0} could not be created. {1}".format(account_name, check.message))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_subnet.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_subnet.py
index 6345565..e774714 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_subnet.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_subnet.py
@@ -26,6 +26,7 @@ import ipaddress
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--resource_group_name', type=str, default='')
@@ -64,14 +65,14 @@ if __name__ == "__main__":
             datalab_subnet_cidr = '{0}/{1}'.format(ipaddress.ip_address(last_ip + 1), args.prefix)
         if args.subnet_name != '':
             if AzureMeta().get_subnet(args.resource_group_name, args.vpc_name, args.subnet_name):
-                print("REQUESTED SUBNET {} ALREADY EXISTS".format(args.subnet_name))
+                logging.info("REQUESTED SUBNET {} ALREADY EXISTS".format(args.subnet_name))
             else:
-                print("Creating Subnet {}".format(args.subnet_name))
+                logging.info("Creating Subnet {}".format(args.subnet_name))
                 AzureActions().create_subnet(args.resource_group_name, args.vpc_name, args.subnet_name,
                                              datalab_subnet_cidr)
         else:
-            print("Subnet name can't be empty")
+            logging.info("Subnet name can't be empty")
             sys.exit(1)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
index eb707fe..58e18bb 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
@@ -25,6 +25,7 @@ import argparse
 import os
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -49,8 +50,8 @@ if __name__ == "__main__":
     if AzureActions().download_from_container(resource_group_name, ssn_storage_account_name, container_name, gitlab_certfile):
         conn.put(gitlab_certfile, gitlab_certfile)
         conn.sudo('chown root:root {}'.format(gitlab_certfile))
-        print('{} has been downloaded'.format(gitlab_certfile))
+        logging.info('{} has been downloaded'.format(gitlab_certfile))
     else:
-        print('There is no {} to download'.format(gitlab_certfile))
+        logging.info('There is no {} to download'.format(gitlab_certfile))
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
index 0e4f206..b650ad0 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -41,18 +41,11 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         # generating variables dictionary
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         notebook_config = dict()
         if 'exploratory_name' in os.environ:
             notebook_config['exploratory_name'] = os.environ['exploratory_name']
@@ -98,7 +91,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         params = "--cluster_name {0} --spark_version {1} --hadoop_version {2} --os_user {3} --spark_master {4}" \
                  " --keyfile {5} --notebook_ip {6} --datalake_enabled {7} --spark_master_ip {8}".\
             format(notebook_config['cluster_name'], os.environ['notebook_spark_version'],
@@ -117,7 +109,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
-        print('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
         params = "--hostname {0} " \
                  "--keyfile {1} " \
                  "--os_user {2} " \
@@ -140,7 +131,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Configure notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
index d37735a..27dfcb8 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,14 +34,6 @@ from Crypto.PublicKey import RSA
 from fabric import *
 
 if __name__ == "__main__":
-    instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
@@ -54,7 +46,7 @@ if __name__ == "__main__":
         notebook_config['endpoint_tag'] = notebook_config['endpoint_name']
         notebook_config['application'] = os.environ['application'].lower()
 
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         try:
             notebook_config['exploratory_name'] = os.environ['exploratory_name']
         except:
@@ -113,16 +105,16 @@ if __name__ == "__main__":
                 notebook_config['endpoint_name'],
                 notebook_config['application'])
 
-        print('Searching pre-configured images')
+        logging.info('Searching pre-configured images')
         notebook_config['image_name'] = os.environ['azure_{}_image_name'.format(os.environ['conf_os_family'])]
         if os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['application'] == 'deeplearning':
             if AzureMeta.get_image(notebook_config['resource_group_name'], notebook_config['expected_image_name']):
                 notebook_config['image_name'] = notebook_config['expected_image_name']
                 notebook_config['image_type'] = 'pre-configured'
-                print('Pre-configured image found. Using: {}'.format(notebook_config['image_name']))
+                logging.info('Pre-configured image found. Using: {}'.format(notebook_config['image_name']))
             else:
                 notebook_config['image_name'] = os.environ['notebook_image_name']
-                print('Pre-configured deeplearning image found. Using: {}'.format(notebook_config['image_name']))
+                logging.info('Pre-configured deeplearning image found. Using: {}'.format(notebook_config['image_name']))
         else:
             notebook_config['notebook_image_name'] = (lambda x: '{0}-{1}-{2}-{3}-{4}'.format(
                 notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_name'],
@@ -131,12 +123,12 @@ if __name__ == "__main__":
             if AzureMeta.get_image(notebook_config['resource_group_name'], notebook_config['notebook_image_name']):
                 notebook_config['image_name'] = notebook_config['notebook_image_name']
                 notebook_config['image_type'] = 'pre-configured'
-                print('Pre-configured image found. Using: {}'.format(notebook_config['notebook_image_name']))
+                logging.info('Pre-configured image found. Using: {}'.format(notebook_config['notebook_image_name']))
             else:
                 os.environ['notebook_image_name'] = notebook_config['image_name']
-                print('No pre-configured image found. Using default one: {}'.format(notebook_config['image_name']))
+                logging.info('No pre-configured image found. Using default one: {}'.format(notebook_config['image_name']))
     except Exception as err:
-        print("Failed to generate variables dictionary.")
+        logging.error("Failed to generate variables dictionary.")
         datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
 
@@ -148,7 +140,6 @@ if __name__ == "__main__":
 
         if edge_status != 'running':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             ssn_hostname = AzureMeta.get_private_ip_address(notebook_config['resource_group_name'],
                                                               os.environ['conf_service_base_name'] + '-ssn')
             datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
@@ -167,7 +158,6 @@ if __name__ == "__main__":
     # launching instance for notebook server
     try:
         logging.info('[CREATE NOTEBOOK INSTANCE]')
-        print('[CREATE NOTEBOOK INSTANCE]')
         params = "--instance_name {} --instance_size {} --region {} --vpc_name {} --network_interface_name {} \
             --security_group_name {} --subnet_name {} --service_base_name {} --resource_group_name {} \
             --datalab_ssh_user_name {} --public_ip_name {} --public_key '''{}''' --primary_disk_size {} \
@@ -190,6 +180,6 @@ if __name__ == "__main__":
         try:
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
         except:
-            print("The instance hasn't been created.")
+            logging.error("The instance hasn't been created.")
         datalab.fab.append_result("Failed to create instance.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py b/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py
index 4aa8d12..128329a 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py
@@ -25,6 +25,7 @@ import argparse
 import sys
 from datalab.actions_lib import *
 from datalab.fab import find_cluster_kernels
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -37,7 +38,7 @@ args = parser.parse_args()
 
 
 if __name__ == "__main__":
-    print('Configure connections')
+    logging.info('Configure connections')
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
@@ -47,6 +48,6 @@ if __name__ == "__main__":
             AzureActions().remove_dataengine_kernels(args.resource_group_name, args.notebook_name,
                                                      args.os_user, args.keyfile, cluster)
     except Exception as err:
-        print('Failed to remove cluster kernels.', str(err))
+        logging.error('Failed to remove cluster kernels.', str(err))
         sys.exit(1)
     conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py b/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py
index 73dff2f..09c1bd4 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py
@@ -27,6 +27,7 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -47,5 +48,5 @@ if __name__ == "__main__":
         try:
             subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
index af27198..798e454 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,16 +33,10 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = os.environ['conf_service_base_name']
     notebook_config['resource_group_name'] = os.environ['azure_resource_group_name']
@@ -50,11 +44,11 @@ if __name__ == "__main__":
 
     try:
         logging.info('[START NOTEBOOK]')
-        print('[START NOTEBOOK]')
+        logging.info('[START NOTEBOOK]')
         try:
-            print("Starting notebook")
+            logging.info("Starting notebook")
             AzureActions.start_instance(notebook_config['resource_group_name'], notebook_config['notebook_name'])
-            print("Instance {} has been started".format(notebook_config['notebook_name']))
+            logging.info("Instance {} has been started".format(notebook_config['notebook_name']))
         except:
             traceback.print_exc()
             raise Exception
@@ -64,7 +58,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SETUP USER GIT CREDENTIALS]')
-        print('[SETUP USER GIT CREDENTIALS]')
         notebook_config['notebook_ip'] = AzureMeta.get_private_ip_address(
             notebook_config['resource_group_name'], notebook_config['notebook_name'])
         notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
@@ -82,7 +75,6 @@ if __name__ == "__main__":
     if os.environ['azure_datalake_enable'] == 'true':
         try:
             logging.info('[UPDATE STORAGE CREDENTIALS]')
-            print('[UPDATE STORAGE CREDENTIALS]')
             notebook_config['notebook_ip'] = AzureMeta.get_private_ip_address(
                 notebook_config['resource_group_name'], notebook_config['notebook_name'])
             global conn
@@ -103,7 +95,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATE LAST ACTIVITY TIME]')
-        print('[UPDATE LAST ACTIVITY TIME]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
@@ -118,15 +109,14 @@ if __name__ == "__main__":
     try:
         ip_address = AzureMeta.get_private_ip_address(notebook_config['resource_group_name'],
                                                       notebook_config['notebook_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['notebook_name']))
-        print("Private IP: {}".format(ip_address))
+        logging.info("Instance name: {}".format(notebook_config['notebook_name']))
+        logging.info("Private IP: {}".format(ip_address))
         with open("/root/result.json", 'w') as result:
             res = {"ip": ip_address,
                    "notebook_name": notebook_config['notebook_name'],
                    "Action": "Start up notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_stop_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_stop_notebook.py
index 82a9533..3154875 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_stop_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_stop_notebook.py
@@ -25,13 +25,13 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 
 def stop_notebook(resource_group_name, notebook_name):
-    print("Stopping data engine cluster")
+    logging.info("Stopping data engine cluster")
     cluster_list = []
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
@@ -40,35 +40,28 @@ def stop_notebook(resource_group_name, notebook_name):
                     if 'master' == vm.tags["Type"]:
                         cluster_list.append(vm.tags["Name"])
                     AzureActions.stop_instance(resource_group_name, vm.name)
-                    print("Instance {} has been stopped".format(vm.name))
+                    logging.info("Instance {} has been stopped".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to stop clusters", str(err))
         sys.exit(1)
 
-    print("Stopping notebook")
+    logging.info("Stopping notebook")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "Name" in vm.tags:
                 if notebook_name == vm.tags["Name"]:
                     AzureActions.stop_instance(resource_group_name, vm.name)
-                    print("Instance {} has been stopped".format(vm.name))
+                    logging.info("Instance {} has been stopped".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to stop instance", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     if 'exploratory_name' in os.environ:
         notebook_config['exploratory_name'] = os.environ['exploratory_name']
@@ -82,7 +75,6 @@ if __name__ == "__main__":
     notebook_config['notebook_name'] = os.environ['notebook_instance_name']
 
     logging.info('[STOP NOTEBOOK]')
-    print('[STOP NOTEBOOK]')
     try:
         stop_notebook(notebook_config['resource_group_name'], notebook_config['notebook_name'])
     except Exception as err:
@@ -93,7 +85,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Stop notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_terminate_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_terminate_notebook.py
index d2b8216..77ef93f 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_terminate_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_terminate_notebook.py
@@ -25,47 +25,41 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_nb(resource_group_name, notebook_name):
-    print("Terminating data engine cluster")
+    logging.info("Terminating data engine cluster")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "notebook_name" in vm.tags:
                 if notebook_name == vm.tags['notebook_name']:
                     AzureActions.remove_instance(resource_group_name, vm.name)
-                    print("Instance {} has been terminated".format(vm.name))
+                    logging.info("Instance {} has been terminated".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to terminate clusters", str(err))
         sys.exit(1)
 
-    print("Terminating notebook")
+    logging.info("Terminating notebook")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "Name" in vm.tags:
                 if notebook_name == vm.tags["Name"]:
                     AzureActions.remove_instance(resource_group_name, vm.name)
-                    print("Instance {} has been terminated".format(vm.name))
+                    logging.info("Instance {} has been terminated".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to terminate instance", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     if 'exploratory_name' in os.environ:
         notebook_config['exploratory_name'] = os.environ['exploratory_name']
@@ -80,7 +74,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE NOTEBOOK]')
-        print('[TERMINATE NOTEBOOK]')
         try:
             terminate_nb(notebook_config['resource_group_name'], notebook_config['notebook_name'])
         except Exception as err:
@@ -94,7 +87,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Terminate notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
index 27bb216..3d25aea 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import multiprocessing
 import os
 import sys
@@ -40,7 +40,6 @@ def configure_slave(slave_number, data_engine):
     slave_hostname = AzureMeta.get_private_ip_address(data_engine['resource_group_name'], slave_name)
     try:
         logging.info('[CREATING DATALAB SSH USER ON SLAVE NODE]')
-        print('[CREATING DATALAB SSH USER ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (slave_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem", initial_user,
              data_engine['datalab_ssh_user'], sudo_group)
@@ -56,7 +55,6 @@ def configure_slave(slave_number, data_engine):
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY ON SLAVE]')
         logging.info('[INSTALLING USERs KEY ON SLAVE]')
         additional_config = {"user_keyname": data_engine['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -75,7 +73,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CLEANING INSTANCE FOR SLAVE NODE]')
-        print('[CLEANING INSTANCE FOR SLAVE NODE]')
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
             .format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
@@ -90,7 +87,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CONFIGURE PROXY ON SLAVE NODE]')
-        print('[CONFIGURE PROXY ON ON SLAVE NODE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(slave_hostname, slave_name, keyfile_name, json.dumps(additional_config),
@@ -107,7 +103,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON SLAVE NODE]')
-        print('[INSTALLING PREREQUISITES ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_hostname)
@@ -123,7 +118,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
-        print('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --master_ip {} --node_type {}". \
             format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
@@ -149,17 +143,10 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
-
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         data_engine = dict()
         if 'exploratory_name' in os.environ:
             data_engine['exploratory_name'] = os.environ['exploratory_name']
@@ -230,8 +217,7 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        logging.info('[CREATING DATA ATA LAB SSH USER ON MASTER NODE]')
-        print('[CREATING DATALAB SSH USER ON MASTER NODE]')
+        logging.info('[CREATING DATALAB SSH USER ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (master_node_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem", initial_user,
              data_engine['datalab_ssh_user'], sudo_group)
@@ -247,7 +233,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY ON MASTER]')
         logging.info('[INSTALLING USERs KEY ON MASTER]')
         additional_config = {"user_keyname": data_engine['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -267,7 +252,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CLEANING INSTANCE FOR MASTER NODE]')
-        print('[CLEANING INSTANCE FOR MASTER NODE]')
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
             .format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
@@ -282,7 +266,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE PROXY ON MASTER NODE]')
-        print('[CONFIGURE PROXY ON ON MASTER NODE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(master_node_hostname, data_engine['master_node_name'], keyfile_name, json.dumps(additional_config),
@@ -299,7 +282,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON MASTER NODE]')
-        print('[INSTALLING PREREQUISITES ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_hostname)
@@ -315,7 +297,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE MASTER NODE]')
-        print('[CONFIGURE MASTER NODE]')
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --master_ip {} --node_type {}".\
             format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
@@ -349,7 +330,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         notebook_instance_ip = AzureMeta.get_private_ip_address(data_engine['resource_group_name'],
                                                                 data_engine['notebook_name'])
@@ -391,13 +371,12 @@ if __name__ == "__main__":
         spark_master_access_url = "https://" + edge_instance_hostname + "/{}/".format(
             data_engine['exploratory_name'] + '_' + data_engine['computational_name'])
         logging.info('[SUMMARY]')
-        print('[SUMMARY]')
-        print("Service base name: {}".format(data_engine['service_base_name']))
-        print("Region: {}".format(data_engine['region']))
-        print("Cluster name: {}".format(data_engine['cluster_name']))
-        print("Master node shape: {}".format(data_engine['master_size']))
-        print("Slave node shape: {}".format(data_engine['slave_size']))
-        print("Instance count: {}".format(str(data_engine['instance_count'])))
+        logging.info("Service base name: {}".format(data_engine['service_base_name']))
+        logging.info("Region: {}".format(data_engine['region']))
+        logging.info("Cluster name: {}".format(data_engine['cluster_name']))
+        logging.info("Master node shape: {}".format(data_engine['master_size']))
+        logging.info("Slave node shape: {}".format(data_engine['slave_size']))
+        logging.info("Instance count: {}".format(str(data_engine['instance_count'])))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": data_engine['cluster_name'],
                    "instance_id": data_engine['master_node_name'],
@@ -410,7 +389,7 @@ if __name__ == "__main__":
                        # "url": spark_master_url}
                    ]
                    }
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py
index 995b7d0..06dd514 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from Crypto.PublicKey import RSA
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -49,7 +43,7 @@ if __name__ == "__main__":
         data_engine['endpoint_name'] = os.environ['endpoint_name']
         data_engine['project_tag'] = data_engine['project_name']
         data_engine['endpoint_tag'] = data_engine['endpoint_name']
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         if 'exploratory_name' in os.environ:
             data_engine['exploratory_name'] = os.environ['exploratory_name']
         else:
@@ -119,15 +113,15 @@ if __name__ == "__main__":
         data_engine['notebook_image_name'] = (lambda x: os.environ['notebook_image_name'] if x != 'None'
                     else data_engine['expected_image_name'])(str(os.environ.get('notebook_image_name')))
 
-        print('Searching pre-configured images')
+        logging.info('Searching pre-configured images')
         if AzureMeta.get_image(data_engine['resource_group_name'], data_engine['notebook_image_name']) and \
                         os.environ['application'] in os.environ['dataengine_image_notebooks'].split(','):
             data_engine['image_name'] = data_engine['notebook_image_name']
             data_engine['image_type'] = 'pre-configured'
-            print('Pre-configured image found. Using: {}'.format(data_engine['notebook_image_name']))
+            logging.info('Pre-configured image found. Using: {}'.format(data_engine['notebook_image_name']))
         else:
             data_engine['image_name'] = os.environ['azure_{}_image_name'.format(os.environ['conf_os_family'])]
-            print('No pre-configured image found. Using default one: {}'.format(data_engine['image_name']))
+            logging.info('No pre-configured image found. Using default one: {}'.format(data_engine['image_name']))
     except Exception as err:
         datalab.fab.append_result("Failed to generate variables dictionary", str(err))
         sys.exit(1)
@@ -139,7 +133,6 @@ if __name__ == "__main__":
                                                                               data_engine['endpoint_name']))
         if edge_status != 'running':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             ssn_hostname = AzureMeta.get_private_ip_address(data_engine['resource_group_name'],
                                                             data_engine['service_base_name'] + '-ssn')
             datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
@@ -160,7 +153,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE MASTER NODE]')
-        print('[CREATE MASTER NODE]')
 
         if 'NC' in data_engine['master_size']:
             data_engine['instance_storage_account_type'] = 'Standard_LRS'
@@ -186,14 +178,13 @@ if __name__ == "__main__":
         try:
             AzureActions.remove_instance(data_engine['resource_group_name'], data_engine['master_node_name'])
         except:
-            print("The instance hasn't been created.")
+            logging.info("The instance hasn't been created.")
         datalab.fab.append_result("Failed to create master instance.", str(err))
         sys.exit(1)
 
     try:
         for i in range(data_engine['instance_count'] - 1):
             logging.info('[CREATE SLAVE NODE {}]'.format(i + 1))
-            print('[CREATE SLAVE NODE {}]'.format(i + 1))
 
             slave_name = data_engine['slave_node_name'] + '{}'.format(i + 1)
             slave_nif_name = slave_name + '-nif'
@@ -222,7 +213,7 @@ if __name__ == "__main__":
             try:
                 AzureActions.remove_instance(data_engine['resource_group_name'], slave_name)
             except:
-                print("The slave instance {} hasn't been created.".format(slave_name))
+                logging.info("The slave instance {} hasn't been created.".format(slave_name))
         AzureActions.remove_instance(data_engine['resource_group_name'], data_engine['master_node_name'])
         datalab.fab.append_result("Failed to create slave instances.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
index 2f100fd..5b7938b 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,29 +34,23 @@ from fabric import *
 
 
 def start_data_engine(resource_group_name, cluster_name):
-    print("Starting data engine cluster")
+    logging.info("Starting data engine cluster")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "Name" in vm.tags:
                 if cluster_name == vm.tags["Name"]:
                     AzureActions.start_instance(resource_group_name, vm.name)
-                    print("Instance {} has been started".format(vm.name))
+                    logging.info("Instance {} has been started".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to start dataengine", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name']
@@ -77,7 +71,6 @@ if __name__ == "__main__":
                                                           data_engine['computational_name'])
     try:
         logging.info('[STARTING DATA ENGINE]')
-        print('[STARTING DATA ENGINE]')
         try:
             start_data_engine(data_engine['resource_group_name'], data_engine['cluster_name'])
         except Exception as err:
@@ -89,7 +82,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATE LAST ACTIVITY TIME]')
-        print('[UPDATE LAST ACTIVITY TIME]')
         data_engine['computational_id'] = data_engine['cluster_name'] + '-m'
         data_engine['notebook_ip'] = AzureMeta.get_private_ip_address(data_engine['resource_group_name'],
                                                                       os.environ['notebook_instance_name'])
@@ -112,7 +104,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Start Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_stop.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_stop.py
index 9db2c9b..62ecad7 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_stop.py
@@ -25,36 +25,30 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def stop_data_engine(resource_group_name, cluster_name):
-    print("Stopping data engine cluster")
+    logging.info("Stopping data engine cluster")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "Name" in vm.tags:
                 if cluster_name == vm.tags["Name"]:
                     AzureActions.stop_instance(resource_group_name, vm.name)
-                    print("Instance {} has been stopped".format(vm.name))
+                    logging.info("Instance {} has been stopped".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to stop dataengine", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name']
@@ -75,7 +69,6 @@ if __name__ == "__main__":
                                                           data_engine['computational_name'])
     try:
         logging.info('[STOPPING DATA ENGINE]')
-        print('[STOPPING DATA ENGINE]')
         try:
             stop_data_engine(data_engine['resource_group_name'], data_engine['cluster_name'])
         except Exception as err:
@@ -89,7 +82,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Stop Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_terminate.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_terminate.py
index 7cec539..95ca3b4 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_terminate.py
@@ -25,25 +25,25 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_data_engine(resource_group_name, notebook_name, os_user, key_path, cluster_name):
-    print("Terminating data engine cluster")
+    logging.info("Terminating data engine cluster")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "Name" in vm.tags:
                 if cluster_name == vm.tags["Name"]:
                     AzureActions.remove_instance(resource_group_name, vm.name)
-                    print("Instance {} has been terminated".format(vm.name))
+                    logging.info("Instance {} has been terminated".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataengine", str(err))
         sys.exit(1)
 
-    print("Removing Data Engine kernels from notebook")
+    logging.info("Removing Data Engine kernels from notebook")
     try:
         AzureActions.remove_dataengine_kernels(resource_group_name, notebook_name, os_user, key_path, cluster_name)
     except Exception as err:
@@ -52,16 +52,10 @@ def terminate_data_engine(resource_group_name, notebook_name, os_user, key_path,
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name']
@@ -86,7 +80,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE DATA ENGINE]')
-        print('[TERMINATE DATA ENGINE]')
         try:
             terminate_data_engine(data_engine['resource_group_name'], data_engine['notebook_name'],
                                   os.environ['conf_os_user'], data_engine['key_path'], data_engine['cluster_name'])
@@ -101,7 +94,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Terminate Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
index c228c97..ae83807 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,13 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -134,8 +127,7 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        logging.info('[CREATING DataLab SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
+        logging.info('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -153,7 +145,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON DEEP LEARNING INSTANCE]')
-        print('[CONFIGURE PROXY ON DEEP LEARNING  INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -169,7 +160,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -188,7 +178,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}".format(
             instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
             edge_instance_private_hostname)
@@ -204,7 +193,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
-        print('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {0} --keyfile {1} " \
                  "--os_user {2} --jupyter_version {3} " \
                  "--scala_version {4} --spark_version {5} " \
@@ -228,7 +216,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -244,7 +231,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --resource_group_name {} --notebook_name {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -261,10 +247,10 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'], notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -272,7 +258,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -296,7 +282,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -336,18 +321,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         tensorboard_access_url = "https://" + edge_instance_hostname + "/{}-tensor/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Tensor Board URL: {}".format(tensorboard_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Tensor Board URL: {}".format(tensorboard_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py b/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py
index 46a9607..adc7c35 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,13 +34,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     def clear_resources():
         AzureActions.remove_instance(edge_conf['resource_group_name'], edge_conf['instance_name'])
         AzureActions.remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
@@ -61,7 +54,7 @@ if __name__ == "__main__":
                     AzureActions.remove_datalake_directory(datalake.name, edge_conf['datalake_user_directory_name'])
 
     try:
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
         edge_conf = dict()
@@ -151,7 +144,6 @@ if __name__ == "__main__":
             edge_conf['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             edge_conf['instance_hostname'], os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             edge_conf['initial_user'], edge_conf['datalab_ssh_user'], edge_conf['sudo_group'])
@@ -167,7 +159,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING PREREQUISITES]')
         logging.info('[INSTALLING PREREQUISITES]')
         params = "--hostname {} --keyfile {} --user {} --region {}".format(
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], edge_conf['datalab_ssh_user'],
@@ -183,7 +174,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING HTTP PROXY]')
         logging.info('[INSTALLING HTTP PROXY]')
         additional_config = {"exploratory_subnet": edge_conf['private_subnet_cidr'],
                              "template_file": "/root/templates/squid.conf",
@@ -209,7 +199,6 @@ if __name__ == "__main__":
 
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": edge_conf['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -227,7 +216,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING NGINX REVERSE PROXY]')
         logging.info('[INSTALLING NGINX REVERSE PROXY]')
         edge_conf['keycloak_client_secret'] = str(uuid.uuid4())
         params = "--hostname {} --keyfile {} --user {} --keycloak_client_id {} --keycloak_client_secret {} " \
@@ -270,25 +258,24 @@ if __name__ == "__main__":
             if edge_conf['user_storage_account_name'] == storage_account.tags["Name"]:
                 edge_conf['user_storage_account_name'] = storage_account.name
 
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(edge_conf['instance_name']))
-        print("Hostname: {}".format(edge_conf['instance_dns_name']))
-        print("Public IP: {}".format(edge_conf['edge_public_ip']))
-        print("Private IP: {}".format(edge_conf['edge_private_ip']))
-        print("Key name: {}".format(edge_conf['key_name']))
-        print("User storage account name: {}".format(edge_conf['user_storage_account_name']))
-        print("User container name: {}".format(edge_conf['user_container_name']))
+        logging.info("Instance name: {}".format(edge_conf['instance_name']))
+        logging.info("Hostname: {}".format(edge_conf['instance_dns_name']))
+        logging.info("Public IP: {}".format(edge_conf['edge_public_ip']))
+        logging.info("Private IP: {}".format(edge_conf['edge_private_ip']))
+        logging.info("Key name: {}".format(edge_conf['key_name']))
+        logging.info("User storage account name: {}".format(edge_conf['user_storage_account_name']))
+        logging.info("User container name: {}".format(edge_conf['user_container_name']))
         if os.environ['azure_datalake_enable'] == 'true':
             for datalake in AzureMeta.list_datalakes(edge_conf['resource_group_name']):
                 if edge_conf['datalake_store_name'] == datalake.tags["Name"]:
                     edge_conf['datalake_id'] = datalake.name
-            print("Data Lake name: {}".format(edge_conf['datalake_id']))
-            print("Data Lake tag name: {}".format(edge_conf['datalake_store_name']))
-            print("Data Lake Store user directory name: {}".format(edge_conf['datalake_user_directory_name']))
-        print("Notebook SG: {}".format(edge_conf['notebook_security_group_name']))
-        print("Edge SG: {}".format(edge_conf['edge_security_group_name']))
-        print("Notebook subnet: {}".format(edge_conf['private_subnet_cidr']))
+            logging.info("Data Lake name: {}".format(edge_conf['datalake_id']))
+            logging.info("Data Lake tag name: {}".format(edge_conf['datalake_store_name']))
+            logging.info("Data Lake Store user directory name: {}".format(edge_conf['datalake_user_directory_name']))
+        logging.info("Notebook SG: {}".format(edge_conf['notebook_security_group_name']))
+        logging.info("Edge SG: {}".format(edge_conf['edge_security_group_name']))
+        logging.info("Notebook subnet: {}".format(edge_conf['private_subnet_cidr']))
         with open("/root/result.json", 'w') as result:
             if os.environ['azure_datalake_enable'] == 'false':
                 res = {"hostname": edge_conf['instance_dns_name'],
@@ -334,7 +321,7 @@ if __name__ == "__main__":
                        "project_name": edge_conf['project_name'],
                        "@class": "com.epam.datalab.dto.azure.edge.EdgeInfoAzure",
                        "Action": "Create new EDGE server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
index 528c8ab..1e94746 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
@@ -30,17 +30,11 @@ from Crypto.PublicKey import RSA
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         edge_conf = dict()
         edge_conf['service_base_name'] = os.environ['conf_service_base_name']
         edge_conf['resource_group_name'] = os.environ['azure_resource_group_name']
@@ -95,19 +89,18 @@ if __name__ == "__main__":
         # FUSE in case of absence of user's key
         fname = "{}{}.pub".format(os.environ['conf_key_dir'], edge_conf['user_keyname'])
         if not os.path.isfile(fname):
-            print("USERs PUBLIC KEY DOES NOT EXIST in {}".format(fname))
+            logging.info("USERs PUBLIC KEY DOES NOT EXIST in {}".format(fname))
             sys.exit(1)
 
-        print("Will create exploratory environment with edge node as access point as following: {}".format(json.dumps(edge_conf, sort_keys=True, indent=4, separators=(',', ': '))))
+        logging.info("Will create exploratory environment with edge node as access point as following: {}".format(json.dumps(edge_conf, sort_keys=True, indent=4, separators=(',', ': '))))
         logging.info(json.dumps(edge_conf))
     except Exception as err:
-        print("Failed to generate variables dictionary.")
+        logging.error("Failed to generate variables dictionary.")
         append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
 
     try:
         logging.info('[CREATE SUBNET]')
-        print('[CREATE SUBNET]')
         params = "--resource_group_name {} --vpc_name {} --region {} --vpc_cidr {} --subnet_name {} --prefix {}".\
             format(edge_conf['resource_group_name'], edge_conf['vpc_name'], edge_conf['region'], edge_conf['vpc_cidr'],
                    edge_conf['private_subnet_name'], edge_conf['private_subnet_prefix'])
@@ -117,22 +110,21 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         try:
             AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                          edge_conf['private_subnet_name'])
         except:
-            print("Subnet hasn't been created.")
+            logging.info("Subnet hasn't been created.")
         append_result("Failed to create subnet.", str(err))
         sys.exit(1)
 
     edge_conf['private_subnet_cidr'] = AzureMeta().get_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                                               edge_conf['private_subnet_name']).address_prefix
-    print('NEW SUBNET CIDR CREATED: {}'.format(edge_conf['private_subnet_cidr']))
+    logging.info('NEW SUBNET CIDR CREATED: {}'.format(edge_conf['private_subnet_cidr']))
 
     try:
         logging.info('[CREATE SECURITY GROUP FOR EDGE NODE]')
-        print('[CREATE SECURITY GROUP FOR EDGE]')
         edge_list_rules = [
             {
                 "name": "in-1",
@@ -400,7 +392,7 @@ if __name__ == "__main__":
                 AzureActions().remove_security_group(edge_conf['resource_group_name'],
                                                      edge_conf['edge_security_group_name'])
             except:
-                print("Edge Security group hasn't been created.")
+                logging.info("Edge Security group hasn't been created.")
             traceback.print_exc()
             append_result("Failed creating security group for edge node.", str(err))
             raise Exception
@@ -409,7 +401,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
-        print('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
         notebook_list_rules = [
             {
                 "name": "in-1",
@@ -500,7 +491,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                      edge_conf['private_subnet_name'])
         AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name'])
@@ -508,11 +499,10 @@ if __name__ == "__main__":
             AzureActions().remove_security_group(edge_conf['resource_group_name'],
                                                  edge_conf['notebook_security_group_name'])
         except:
-            print("Notebook Security group hasn't been created.")
+            logging.info("Notebook Security group hasn't been created.")
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR MASTER NODE]')
-    print("[CREATING SECURITY GROUPS FOR MASTER NODE]")
     try:
         cluster_list_rules = [
             {
@@ -606,7 +596,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                      edge_conf['private_subnet_name'])
         AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name'])
@@ -616,12 +606,11 @@ if __name__ == "__main__":
             AzureActions().remove_security_group(edge_conf['resource_group_name'],
                                                  edge_conf['master_security_group_name'])
         except:
-            print("Master Security group hasn't been created.")
+            logging.info("Master Security group hasn't been created.")
         append_result("Failed to create Security groups. Exception:" + str(err))
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR SLAVE NODES]')
-    print("[CREATING SECURITY GROUPS FOR SLAVE NODES]")
     try:
         params = "--resource_group_name {} --security_group_name {} --region {} --tags '{}' --list_rules '{}'".format(
             edge_conf['resource_group_name'], edge_conf['slave_security_group_name'], edge_conf['region'],
@@ -632,7 +621,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                      edge_conf['private_subnet_name'])
         AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name'])
@@ -644,13 +633,12 @@ if __name__ == "__main__":
             AzureActions().remove_security_group(edge_conf['resource_group_name'],
                                                  edge_conf['slave_security_group_name'])
         except:
-            print("Slave Security group hasn't been created.")
+            logging.info("Slave Security group hasn't been created.")
         append_result("Failed to create Security groups. Exception:" + str(err))
         sys.exit(1)
 
     try:
         logging.info('[CREATE STORAGE ACCOUNT AND CONTAINERS]')
-        print('[CREATE STORAGE ACCOUNT AND CONTAINERS]')
 
         params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}". \
             format(edge_conf['edge_container_name'], json.dumps(edge_conf['storage_account_tags']),
@@ -661,7 +649,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to create storage account.", str(err))
         AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                      edge_conf['private_subnet_name'])
@@ -679,7 +667,6 @@ if __name__ == "__main__":
     if os.environ['azure_datalake_enable'] == 'true':
         try:
             logging.info('[CREATE DATA LAKE STORE DIRECTORY]')
-            print('[CREATE DATA LAKE STORE DIRECTORY]')
             params = "--resource_group_name {} --datalake_name {} --directory_name {} --ad_user {} --service_base_name {}". \
                 format(edge_conf['resource_group_name'], edge_conf['datalake_store_name'],
                        edge_conf['datalake_user_directory_name'], edge_conf['azure_ad_user_name'],
@@ -690,7 +677,7 @@ if __name__ == "__main__":
                 traceback.print_exc()
                 raise Exception
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             append_result("Failed to create Data Lake Store directory.", str(err))
             AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                          edge_conf['private_subnet_name'])
@@ -708,8 +695,8 @@ if __name__ == "__main__":
                     if edge_conf['datalake_store_name'] == datalake.tags["Name"]:
                         AzureActions().remove_datalake_directory(datalake.name, edge_conf['datalake_user_directory_name'])
             except Exception as err:
-                print('Error: {0}'.format(err))
-                print("Data Lake Store directory hasn't been created.")
+                logging.error('Error: {0}'.format(err))
+                logging.info("Data Lake Store directory hasn't been created.")
             sys.exit(1)
 
     if os.environ['conf_os_family'] == 'debian':
@@ -721,7 +708,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE EDGE INSTANCE]')
-        print('[CREATE EDGE INSTANCE]')
         params = "--instance_name {} --instance_size {} --region {} --vpc_name {} --network_interface_name {} \
             --security_group_name {} --subnet_name {} --service_base_name {} --resource_group_name {} \
             --datalab_ssh_user_name {} --public_ip_name {} --public_key '''{}''' --primary_disk_size {} \
@@ -738,11 +724,11 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         try:
             AzureActions().remove_instance(edge_conf['resource_group_name'], edge_conf['instance_name'])
         except:
-            print("The instance hasn't been created.")
+            logging.info("The instance hasn't been created.")
         AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                      edge_conf['private_subnet_name'])
         AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name'])
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_start.py b/infrastructure-provisioning/src/general/scripts/azure/edge_start.py
index 2f9f2ba..05e9bd0 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_start.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_start.py
@@ -25,19 +25,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
     edge_conf = dict()
@@ -51,7 +44,6 @@ if __name__ == "__main__":
                                                                             os.environ['azure_region'])
 
     logging.info('[START EDGE]')
-    print('[START EDGE]')
     try:
         AzureActions.start_instance(edge_conf['resource_group_name'], edge_conf['instance_name'])
     except Exception as err:
@@ -63,19 +55,18 @@ if __name__ == "__main__":
                                                                      edge_conf['instance_name'])
         private_ip_address = AzureMeta.get_private_ip_address(edge_conf['resource_group_name'],
                                                               edge_conf['instance_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(edge_conf['instance_name']))
-        print("Hostname: {}".format(edge_conf['instance_dns_name']))
-        print("Public IP: {}".format(public_ip_address))
-        print("Private IP: {}".format(private_ip_address))
+        logging.info("Instance name: {}".format(edge_conf['instance_name']))
+        logging.info("Hostname: {}".format(edge_conf['instance_dns_name']))
+        logging.info("Public IP: {}".format(public_ip_address))
+        logging.info("Private IP: {}".format(private_ip_address))
         with open("/root/result.json", 'w') as result:
             res = {"instance_name": edge_conf['instance_name'],
                    "hostname": edge_conf['instance_dns_name'],
                    "public_ip": public_ip_address,
                    "ip": private_ip_address,
                    "Action": "Start up notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_status.py b/infrastructure-provisioning/src/general/scripts/azure/edge_status.py
index 6f41654..c59dc37 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_status.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_status.py
@@ -24,7 +24,7 @@
 import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -32,20 +32,13 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     edge_conf = dict()
     edge_conf['service_base_name'] = os.environ['conf_service_base_name']
     edge_conf['resource_group_name'] = os.environ['azure_resource_group_name']
 
     try:
         logging.info('[COLLECT DATA]')
-        print('[COLLECTING DATA]')
+        logging.info('[COLLECTING DATA]')
         params = '--resource_group_name {} --list_resources "{}"'.format(edge_conf['resource_group_name'],
                                                                          os.environ['edge_list_resources'])
         try:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_stop.py b/infrastructure-provisioning/src/general/scripts/azure/edge_stop.py
index d2bbf5c..8c5efa0 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_stop.py
@@ -25,19 +25,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
     edge_conf = dict()
@@ -49,7 +42,6 @@ if __name__ == "__main__":
                                                            edge_conf['project_name'], edge_conf['endpoint_name'])
 
     logging.info('[STOP EDGE]')
-    print('[STOP EDGE]')
     try:
         AzureActions.stop_instance(edge_conf['resource_group_name'], edge_conf['instance_name'])
     except Exception as err:
@@ -60,7 +52,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"instance_name": edge_conf['instance_name'],
                    "Action": "Stop edge server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_terminate.py b/infrastructure-provisioning/src/general/scripts/azure/edge_terminate.py
index f7c470a..fc317d9 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_terminate.py
@@ -25,122 +25,115 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_edge_node(resource_group_name, service_base_name, project_tag, subnet_name, vpc_name):
-    print("Terminating EDGE, notebook and dataengine virtual machines")
+    logging.info("Terminating EDGE, notebook and dataengine virtual machines")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             try:
                 if project_tag == vm.tags["project_tag"]:
                     AzureActions.remove_instance(resource_group_name, vm.name)
-                    print("Instance {} has been terminated".format(vm.name))
+                    logging.info("Instance {} has been terminated".format(vm.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to terminate instance", str(err))
         sys.exit(1)
 
-    print("Removing network interfaces")
+    logging.info("Removing network interfaces")
     try:
         for network_interface in AzureMeta.list_network_interfaces(resource_group_name):
             try:
                 if project_tag == network_interface.tags["project_tag"]:
                     AzureActions.delete_network_if(resource_group_name, network_interface.name)
-                    print("Network interface {} has been removed".format(network_interface.name))
+                    logging.info("Network interface {} has been removed".format(network_interface.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove network interfaces", str(err))
         sys.exit(1)
 
-    print("Removing static public IPs")
+    logging.info("Removing static public IPs")
     try:
         for static_public_ip in AzureMeta.list_static_ips(resource_group_name):
             try:
                 if project_tag in static_public_ip.tags["project_tag"]:
                     AzureActions.delete_static_public_ip(resource_group_name, static_public_ip.name)
-                    print("Static public IP {} has been removed".format(static_public_ip.name))
+                    logging.info("Static public IP {} has been removed".format(static_public_ip.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove static IPs", str(err))
         sys.exit(1)
 
-    print("Removing disks")
+    logging.info("Removing disks")
     try:
         for disk in AzureMeta.list_disks(resource_group_name):
             try:
                 if project_tag in disk.tags["project_tag"]:
                     AzureActions.remove_disk(resource_group_name, disk.name)
-                    print("Disk {} has been removed".format(disk.name))
+                    logging.info("Disk {} has been removed".format(disk.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove disks", str(err))
         sys.exit(1)
 
-    print("Removing storage account")
+    logging.info("Removing storage account")
     try:
         for storage_account in AzureMeta.list_storage_accounts(resource_group_name):
             try:
                 if project_tag == storage_account.tags["project_tag"]:
                     AzureActions.remove_storage_account(resource_group_name, storage_account.name)
-                    print("Storage account {} has been terminated".format(storage_account.name))
+                    logging.info("Storage account {} has been terminated".format(storage_account.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove storage accounts", str(err))
         sys.exit(1)
 
-    print("Deleting Data Lake Store directory")
+    logging.info("Deleting Data Lake Store directory")
     try:
         for datalake in AzureMeta.list_datalakes(resource_group_name):
             try:
                 if service_base_name == datalake.tags["SBN"]:
                     AzureActions.remove_datalake_directory(datalake.name, project_tag + '-folder')
-                    print("Data Lake Store directory {} has been deleted".format(project_tag + '-folder'))
+                    logging.info("Data Lake Store directory {} has been deleted".format(project_tag + '-folder'))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove Data Lake", str(err))
         sys.exit(1)
 
-    print("Removing security groups")
+    logging.info("Removing security groups")
     try:
         for sg in AzureMeta.network_client.network_security_groups.list(resource_group_name):
             try:
                 if project_tag == sg.tags["project_tag"]:
                     AzureActions.remove_security_group(resource_group_name, sg.name)
-                    print("Security group {} has been terminated".format(sg.name))
+                    logging.info("Security group {} has been terminated".format(sg.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove security groups", str(err))
         sys.exit(1)
 
-    print("Removing private subnet")
+    logging.info("Removing private subnet")
     try:
         AzureActions.remove_subnet(resource_group_name, vpc_name, subnet_name)
-        print("Private subnet {} has been terminated".format(subnet_name))
+        logging.info("Private subnet {} has been terminated".format(subnet_name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove subnet", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
     edge_conf = dict()
@@ -157,7 +150,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE EDGE]')
-        print('[TERMINATE EDGE]')
         try:
             terminate_edge_node(edge_conf['resource_group_name'], edge_conf['service_base_name'],
                                 edge_conf['project_tag'], edge_conf['private_subnet_name'], edge_conf['vpc_name'])
@@ -173,7 +165,7 @@ if __name__ == "__main__":
             res = {"service_base_name": os.environ['conf_service_base_name'],
                    "project_name": edge_conf['project_name'],
                    "Action": "Terminate edge node"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
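
Note: terminate_edge_node above repeats the same list / filter-by-project_tag / remove / log sequence for VMs, network interfaces, static IPs, disks, storage accounts and security groups, each with a bare except that silently skips untagged resources. A sketch of that recurring loop factored into one helper; the helper name, its parameters and the use of logging.debug for skipped resources are illustrative suggestions, not part of this commit:

    # Hypothetical helper for the repeated cleanup loop; list_fn, remove_fn and kind
    # are illustrative parameters, not existing DataLab APIs.
    from datalab.logger import logging

    def remove_project_resources(list_fn, remove_fn, resource_group_name, project_tag, kind):
        for resource in list_fn(resource_group_name):
            try:
                if resource.tags and project_tag == resource.tags.get("project_tag"):
                    remove_fn(resource_group_name, resource.name)
                    logging.info("{} {} has been removed".format(kind, resource.name))
            except Exception as err:
                # keep the original "skip and continue" behaviour, but leave a trace
                logging.debug("Skipping {} {}: {}".format(kind, resource.name, err))
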
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
index 0f30b71..bc26e72 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,12 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -134,7 +128,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -152,7 +145,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
-        print('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -170,7 +162,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
@@ -187,7 +178,6 @@ if __name__ == "__main__":
     # installing and configuring jupiter and all dependencies
     try:
         logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
-        print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {0} --keyfile {1} " \
                  "--region {2} --spark_version {3} " \
                  "--hadoop_version {4} --os_user {5} " \
@@ -211,7 +201,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -228,7 +217,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -245,7 +233,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --resource_group_name {} --notebook_name {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -262,11 +249,11 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'],
                                         notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like this is the first time the notebook server is being configured. Creating image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -274,7 +261,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -298,7 +285,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -336,17 +322,16 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
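
Note: the summary blocks keep the eager str.format() calls inherited from the print versions. That works, but the stdlib logging module also accepts deferred %-style arguments, where formatting only happens if the record actually passes the level filter. Using the variables already defined in the script above, the same lines could equivalently be written as:

    # Equivalent deferred-formatting variants of the summary lines above
    logging.info("Instance name: %s", notebook_config['instance_name'])
    logging.info("Private IP: %s", ip_address)
    logging.info("Jupyter URL: %s", jupyter_ip_url)
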
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
index 4d8fe90..d8cefb6 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,12 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -134,7 +128,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem", initial_user,
              notebook_config['datalab_ssh_user'], sudo_group)
@@ -152,7 +145,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
-        print('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -170,7 +162,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
@@ -187,7 +178,6 @@ if __name__ == "__main__":
     # installing and configuring jupiter and all dependencies
     try:
         logging.info('[CONFIGURE JUPYTERLAB NOTEBOOK INSTANCE]')
-        print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {0} --keyfile {1} " \
                  "--region {2} --spark_version {3} " \
                  "--hadoop_version {4} --os_user {5} " \
@@ -210,7 +200,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -227,7 +216,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -244,10 +232,10 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'], notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like this is the first time the notebook server is being configured. Creating image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -255,7 +243,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -279,7 +267,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -308,7 +295,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGURING PROXY FOR DOCKER]')
         logging.info('[CONFIGURING PROXY FOR DOCKER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -328,7 +314,6 @@ if __name__ == "__main__":
 
 
     try:
-        print('[STARTING JUPYTER CONTAINER]')
         logging.info('[STARTING JUPYTER CONTAINER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -356,17 +341,16 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_acces_url = "http://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
index 29fce72..a3bc925 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,15 +34,8 @@ from Crypto.PublicKey import RSA
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/project/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
         project_conf = dict()
@@ -134,12 +127,12 @@ if __name__ == "__main__":
                 subprocess.run('echo "{0}" >> {1}{2}.pub'.format(project_conf['user_key'], os.environ['conf_key_dir'],
                                                         project_conf['project_name']), shell=True, check=True)
             except:
-                print("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
+                logging.warning("ADMIN'S PUBLIC KEY HAS NOT BEEN INSTALLED")
         except KeyError:
-            print("ADMINSs PUBLIC KEY DOES NOT UPLOADED")
+            logging.error("ADMIN'S PUBLIC KEY HAS NOT BEEN UPLOADED")
             sys.exit(1)
 
-        print("Will create exploratory environment with edge node as access point as following: {}".format(json.dumps(
+        logging.info("Will create exploratory environment with edge node as access point as follows: {}".format(json.dumps(
             project_conf, sort_keys=True, indent=4, separators=(',', ': '))))
         logging.info(json.dumps(project_conf))
     except Exception as err:
@@ -149,7 +142,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SUBNET]')
-        print('[CREATE SUBNET]')
         params = "--resource_group_name {} --vpc_name {} --region {} --vpc_cidr {} --subnet_name {} --prefix {}".\
             format(project_conf['resource_group_name'], project_conf['vpc_name'], project_conf['region'],
                    project_conf['vpc_cidr'], project_conf['private_subnet_name'], project_conf['private_subnet_prefix'])
@@ -163,19 +155,19 @@ if __name__ == "__main__":
             AzureActions.remove_subnet(project_conf['resource_group_name'], project_conf['vpc_name'],
                                        project_conf['private_subnet_name'])
         except:
-            print("Subnet hasn't been created.")
+            logging.info("Subnet hasn't been created.")
         datalab.fab.append_result("Failed to create subnet.", str(err))
         sys.exit(1)
 
     project_conf['private_subnet_cidr'] = AzureMeta.get_subnet(project_conf['resource_group_name'],
                                                                project_conf['vpc_name'],
                                                                project_conf['private_subnet_name']).address_prefix
-    print('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
+    logging.info('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
 
     try:
         if 'azure_edge_security_group_name' in os.environ:
             logging.info('Security group predefined, adding new rule with endpoint IP')
-            print('Security group predefined, adding new rule with endpoint IP')
             if project_conf['endpoint_name'] == 'local':
                 endpoint_ip = AzureMeta.get_instance_public_ip_address(project_conf['resource_group_name'],
                                                           '{}-ssn'.format(project_conf['service_base_name']))
@@ -214,7 +206,7 @@ if __name__ == "__main__":
                                            project_conf['private_subnet_name'])
         else:
             logging.info('[CREATE SECURITY GROUP FOR EDGE NODE]')
-            print('[CREATE SECURITY GROUP FOR EDGE]')
             edge_list_rules = [
                 {
                     "name": "in-1",
@@ -493,7 +485,7 @@ if __name__ == "__main__":
                     AzureActions.remove_security_group(project_conf['resource_group_name'],
                                                        project_conf['edge_security_group_name'])
                 except:
-                    print("Edge Security group hasn't been created.")
+                    logging.info("Edge Security group hasn't been created.")
                 traceback.print_exc()
                 datalab.fab.append_result("Failed creating security group for edge node.", str(err))
                 raise Exception
@@ -503,7 +495,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
-        print('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
         notebook_list_rules = [
             {
                 "name": "in-1",
@@ -606,11 +597,10 @@ if __name__ == "__main__":
             AzureActions.remove_security_group(project_conf['resource_group_name'],
                                                project_conf['notebook_security_group_name'])
         except:
-            print("Notebook Security group hasn't been created.")
+            logging.info("Notebook Security group hasn't been created.")
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR MASTER NODE]')
-    print("[CREATING SECURITY GROUPS FOR MASTER NODE]")
     try:
         cluster_list_rules = [
             {
@@ -715,12 +705,11 @@ if __name__ == "__main__":
             AzureActions.remove_security_group(project_conf['resource_group_name'],
                                                project_conf['master_security_group_name'])
         except:
-            print("Master Security group hasn't been created.")
+            logging.info("Master Security group hasn't been created.")
         datalab.fab.append_result("Failed to create Security groups. Exception:" + str(err))
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR SLAVE NODES]')
-    print("[CREATING SECURITY GROUPS FOR SLAVE NODES]")
     try:
         params = "--resource_group_name {} --security_group_name {} --region {} --tags '{}' --list_rules '{}'".format(
             project_conf['resource_group_name'], project_conf['slave_security_group_name'], project_conf['region'],
@@ -744,13 +733,12 @@ if __name__ == "__main__":
             AzureActions.remove_security_group(project_conf['resource_group_name'],
                                                project_conf['slave_security_group_name'])
         except:
-            print("Slave Security group hasn't been created.")
+            logging.info("Slave Security group hasn't been created.")
         datalab.fab.append_result("Failed to create Security groups. Exception:" + str(err))
         sys.exit(1)
 
     try:
         logging.info('[CREATE SHARED STORAGE ACCOUNT AND CONTAINER]')
-        print('[CREATE SHARED STORAGE ACCOUNT AND CONTAINER]')
         params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}". \
             format(project_conf['shared_container_name'], json.dumps(project_conf['shared_storage_account_tags']),
                    project_conf['resource_group_name'], project_conf['region'])
@@ -775,7 +763,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE STORAGE ACCOUNT AND CONTAINERS]')
-        print('[CREATE STORAGE ACCOUNT AND CONTAINERS]')
 
         params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}". \
             format(project_conf['edge_container_name'], json.dumps(project_conf['storage_account_tags']),
@@ -808,7 +795,7 @@ if __name__ == "__main__":
     if os.environ['azure_datalake_enable'] == 'true':
         try:
             logging.info('[CREATE DATA LAKE STORE DIRECTORY]')
-            print('[CREATE DATA LAKE STORE DIRECTORY]')
             params = "--resource_group_name {} --datalake_name {} --directory_name {} --ad_user {} " \
                      "--service_base_name {}".format(project_conf['resource_group_name'],
                                                      project_conf['datalake_store_name'],
@@ -844,7 +831,7 @@ if __name__ == "__main__":
                         AzureActions.remove_datalake_directory(datalake.name,
                                                                  project_conf['datalake_user_directory_name'])
             except:
-                print("Data Lake Store directory hasn't been created.")
+                logging.info("Data Lake Store directory hasn't been created.")
             sys.exit(1)
 
     if os.environ['conf_os_family'] == 'debian':
@@ -856,7 +843,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE EDGE INSTANCE]')
-        print('[CREATE EDGE INSTANCE]')
         if 'azure_edge_security_group_name' in os.environ:
             project_conf['edge_security_group_name'] = os.environ['azure_edge_security_group_name']
         params = "--instance_name {} --instance_size {} --region {} --vpc_name {} --network_interface_name {} \
@@ -880,7 +866,7 @@ if __name__ == "__main__":
         try:
             AzureActions.remove_instance(project_conf['resource_group_name'], project_conf['instance_name'])
         except:
-            print("The instance hasn't been created.")
+            logging.info("The instance hasn't been created.")
         AzureActions.remove_subnet(project_conf['resource_group_name'], project_conf['vpc_name'],
                                    project_conf['private_subnet_name'])
         if 'azure_edge_security_group_name' not in os.environ:
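
Note: each provisioning step in project_prepare.py follows the same shape: log a section header, build a params string, shell out to a helper script, and on failure roll back the resource, append the error and exit. A condensed sketch of that flow; the helper script name, the params and the placeholder values are illustrative, not a literal excerpt from the file:

    # Condensed sketch of the recurring step pattern in the prepare/configure scripts
    import subprocess
    import sys
    import datalab.fab
    from datalab.logger import logging

    resource_group_name = "example-rg"             # placeholder values for the sketch
    private_subnet_name = "example-project-subnet"

    try:
        logging.info('[CREATE SUBNET]')
        params = "--resource_group_name {} --subnet_name {}".format(resource_group_name,
                                                                    private_subnet_name)
        subprocess.run("~/scripts/{}.py {}".format('common_create_subnet', params),
                       shell=True, check=True)
    except Exception as err:
        datalab.fab.append_result("Failed to create subnet.", str(err))
        sys.exit(1)
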
diff --git a/infrastructure-provisioning/src/general/scripts/azure/project_terminate.py b/infrastructure-provisioning/src/general/scripts/azure/project_terminate.py
index cfe04db..7be7bc5 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/project_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/project_terminate.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import requests
 import sys
@@ -33,96 +33,96 @@ import traceback
 
 
 def terminate_edge_node(resource_group_name, service_base_name, project_tag, subnet_name, vpc_name, endpoint_name):
-    print("Terminating EDGE, notebook and dataengine virtual machines")
+    logging.info("Terminating EDGE, notebook and dataengine virtual machines")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             try:
                 if project_tag == vm.tags["project_tag"]:
                     AzureActions.remove_instance(resource_group_name, vm.name)
-                    print("Instance {} has been terminated".format(vm.name))
+                    logging.info("Instance {} has been terminated".format(vm.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to terminate edge instance.", str(err))
         sys.exit(1)
 
-    print("Removing network interfaces")
+    logging.info("Removing network interfaces")
     try:
         for network_interface in AzureMeta.list_network_interfaces(resource_group_name):
             try:
                 if project_tag == network_interface.tags["project_name"]:
                     AzureActions.delete_network_if(resource_group_name, network_interface.name)
-                    print("Network interface {} has been removed".format(network_interface.name))
+                    logging.info("Network interface {} has been removed".format(network_interface.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove network interfaces.", str(err))
         sys.exit(1)
 
-    print("Removing static public IPs")
+    logging.info("Removing static public IPs")
     try:
         for static_public_ip in AzureMeta.list_static_ips(resource_group_name):
             try:
                 if project_tag in static_public_ip.tags["project_tag"]:
                     AzureActions.delete_static_public_ip(resource_group_name, static_public_ip.name)
-                    print("Static public IP {} has been removed".format(static_public_ip.name))
+                    logging.info("Static public IP {} has been removed".format(static_public_ip.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove static IP addresses.", str(err))
         sys.exit(1)
 
-    print("Removing disks")
+    logging.info("Removing disks")
     try:
         for disk in AzureMeta.list_disks(resource_group_name):
             try:
                 if project_tag in disk.tags["project_tag"]:
                     AzureActions.remove_disk(resource_group_name, disk.name)
-                    print("Disk {} has been removed".format(disk.name))
+                    logging.info("Disk {} has been removed".format(disk.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove volumes.", str(err))
         sys.exit(1)
 
-    print("Removing storage account")
+    logging.info("Removing storage account")
     try:
         for storage_account in AzureMeta.list_storage_accounts(resource_group_name):
             try:
                 if project_tag == storage_account.tags["project_tag"]:
                     AzureActions.remove_storage_account(resource_group_name, storage_account.name)
-                    print("Storage account {} has been terminated".format(storage_account.name))
+                    logging.info("Storage account {} has been terminated".format(storage_account.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove storage accounts.", str(err))
         sys.exit(1)
 
-    print("Deleting Data Lake Store directory")
+    logging.info("Deleting Data Lake Store directory")
     try:
         for datalake in AzureMeta.list_datalakes(resource_group_name):
             try:
                 if service_base_name == datalake.tags["SBN"]:
                     AzureActions.remove_datalake_directory(datalake.name, project_tag + '-folder')
-                    print("Data Lake Store directory {} has been deleted".format(project_tag + '-folder'))
+                    logging.info("Data Lake Store directory {} has been deleted".format(project_tag + '-folder'))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove Data Lake.", str(err))
         sys.exit(1)
 
-    print("Removing project specific images")
+    logging.info("Removing project specific images")
     try:
         for image in AzureMeta.list_images():
             if service_base_name == image.tags["SBN"] and project_tag == image.tags["project_tag"] \
                     and endpoint_name == image.tags["endpoint_tag"]:
                 AzureActions.remove_image(resource_group_name, image.name)
-                print("Image {} has been removed".format(image.name))
+                logging.info("Image {} has been removed".format(image.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove images", str(err))
         sys.exit(1)
 
-    print("Removing security groups")
+    logging.info("Removing security groups")
     try:
         if 'azure_edge_security_group_name' in os.environ:
             AzureActions.remove_security_rules(os.environ['azure_edge_security_group_name'],
@@ -134,31 +134,24 @@ def terminate_edge_node(resource_group_name, service_base_name, project_tag, sub
             try:
                 if project_tag == sg.tags["project_tag"]:
                     AzureActions.remove_security_group(resource_group_name, sg.name)
-                    print("Security group {} has been terminated".format(sg.name))
+                    logging.info("Security group {} has been terminated".format(sg.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove security groups.", str(err))
         sys.exit(1)
 
-    print("Removing private subnet")
+    logging.info("Removing private subnet")
     try:
         AzureActions.remove_subnet(resource_group_name, vpc_name, subnet_name)
-        print("Private subnet {} has been terminated".format(subnet_name))
+        logging.info("Private subnet {} has been terminated".format(subnet_name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove subnets.", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
     project_conf = dict()
@@ -175,7 +168,7 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE EDGE]')
-        print('[TERMINATE EDGE]')
         try:
             terminate_edge_node(project_conf['resource_group_name'], project_conf['service_base_name'],
                                 project_conf['project_tag'], project_conf['private_subnet_name'],
@@ -188,7 +181,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[KEYCLOAK PROJECT CLIENT DELETE]')
         logging.info('[KEYCLOAK PROJECT CLIENT DELETE]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             os.environ['keycloak_auth_server_url'])
@@ -223,14 +215,14 @@ if __name__ == "__main__":
                                           headers={"Authorization": "Bearer " + keycloak_token.get("access_token"),
                                                    "Content-Type": "application/json"})
     except Exception as err:
-        print("Failed to remove project client from Keycloak", str(err))
+        logging.error("Failed to remove project client from Keycloak: {}".format(str(err)))
 
     try:
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": os.environ['conf_service_base_name'],
                    "project_name": project_conf['project_name'],
                    "Action": "Terminate edge node"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
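
Note on the Keycloak cleanup handler above: when an except block should record the traceback as well as the message, the stdlib logging module provides logging.exception, which logs at ERROR level and attaches the current exception info automatically. A small sketch, with keycloak_client_url and headers standing in for the values the script builds from os.environ and the token response:

    # Sketch only; the URL and headers below are placeholders, not values from the script.
    import requests
    from datalab.logger import logging

    keycloak_client_url = "https://keycloak.example.com/auth/admin/realms/master/clients/<id>"
    headers = {"Authorization": "Bearer <token>", "Content-Type": "application/json"}

    try:
        requests.delete(keycloak_client_url, headers=headers)
    except Exception:
        # logs at ERROR level and appends the current traceback to the record
        logging.exception("Failed to remove project client from Keycloak")
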
diff --git a/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py b/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
index 57a3aa5..d90e4fb 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
@@ -24,6 +24,7 @@
 import argparse
 import sys
 from datalab.fab import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -34,15 +35,15 @@ parser.add_argument('--rstudio_pass', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
-    print("Setting password for Rstudio user.")
+    logging.info("Setting password for Rstudio user.")
     try:
         conn.sudo('''bash -c 'echo "{0}:{1}" | chpasswd' '''.format(args.os_user, args.rstudio_pass))
         conn.close()
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
index bbf85e5..9395f6d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -135,7 +129,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -153,7 +146,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON R_STUDIO INSTANCE]')
-        print('[CONFIGURE PROXY ON R_STUDIO INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -171,7 +163,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO R_STUDIO NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO R_STUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
@@ -188,7 +179,6 @@ if __name__ == "__main__":
     # installing and configuring R_STUDIO and all dependencies
     try:
         logging.info('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
-        print('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {0}  --keyfile {1} " \
                  "--region {2} --rstudio_pass {3} " \
                  "--rstudio_version {4} --os_user {5} " \
@@ -211,7 +201,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -228,7 +217,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -244,7 +232,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --resource_group_name {} --notebook_name {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -261,11 +248,11 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'],
                                         notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like this is the first time the notebook server is being configured. Creating image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -273,7 +260,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -297,7 +284,7 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -335,19 +322,18 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         rstudio_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Rstudio URL: {}".format(rstudio_ip_url))
-        print("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
-        print("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Rstudio URL: {}".format(rstudio_ip_url))
+        logging.info("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
+        logging.info("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
index 7c4e802..ee959e6 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ import uuid
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     def clear_resources():
         AzureActions.remove_instance(ssn_conf['resource_group_name'], ssn_conf['instance_name'])
         for datalake in AzureMeta.list_datalakes(ssn_conf['resource_group_name']):
@@ -63,7 +57,6 @@ if __name__ == "__main__":
         ssn_conf['instance'] = 'ssn'
 
         logging.info('[DERIVING NAMES]')
-        print('[DERIVING NAMES]')
 
         ssn_conf['billing_enabled'] = True
         # We need to cut service_base_name to 20 symbols do to the Azure Name length limitation
@@ -140,7 +133,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (ssn_conf['instance_host'], ssn_conf['ssh_key_path'], ssn_conf['initial_user'],
              ssn_conf['datalab_ssh_user'],
@@ -154,7 +146,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING PREREQUISITES TO SSN INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO SSN INSTANCE]')
         params = "--hostname {} --keyfile {} --pip_packages 'backoff bcrypt==3.1.7 argparse fabric==1.14.0 pymongo pyyaml " \
                  "pycryptodome azure==2.0.0' --user {} --region {}".format(ssn_conf['instance_host'],
                                                                        ssn_conf['ssh_key_path'],
@@ -169,7 +160,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE SSN INSTANCE]')
-        print('[CONFIGURE SSN INSTANCE]')
         additional_config = {"nginx_template_dir": "/root/templates/",
                              "service_base_name": ssn_conf['service_base_name'],
                              "security_group_id": ssn_conf['security_group_name'], "vpc_id": ssn_conf['vpc_name'],
@@ -188,7 +178,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURING DOCKER AT SSN INSTANCE]')
-        print('[CONFIGURING DOCKER AT SSN INSTANCE]')
         additional_config = [{"name": "base", "tag": "latest"},
                              {"name": "edge", "tag": "latest"},
                              {"name": "project", "tag": "latest"},
@@ -213,7 +202,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE SSN INSTANCE UI]')
-        print('[CONFIGURE SSN INSTANCE UI]')
         ssn_conf['azure_auth_path'] = '/home/{}/keys/azure_auth.json'.format(ssn_conf['datalab_ssh_user'])
         ssn_conf['ldap_login'] = 'false'
 
@@ -523,7 +511,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     logging.info('[CREATE KEYCLOAK CLIENT]')
-    print('[CREATE KEYCLOAK CLIENT]')
     keycloak_params = "--service_base_name {} --keycloak_auth_server_url {} --keycloak_realm_name {} " \
                       "--keycloak_user {} --keycloak_user_password {} --instance_public_ip {} --keycloak_client_secret {} " \
         .format(ssn_conf['service_base_name'], os.environ['keycloak_auth_server_url'],
@@ -539,41 +526,39 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SUMMARY]')
-
-        print('[SUMMARY]')
-        print("Service base name: {}".format(ssn_conf['service_base_name']))
-        print("SSN Name: {}".format(ssn_conf['instance_name']))
+        logging.info("Service base name: {}".format(ssn_conf['service_base_name']))
+        logging.info("SSN Name: {}".format(ssn_conf['instance_name']))
         if os.environ['conf_network_type'] == 'public':
-            print("SSN Public IP address: {}".format(ssn_conf['instnace_ip']))
-            print("SSN Hostname: {}".format(ssn_conf['instance_dns_name']))
+            logging.info("SSN Public IP address: {}".format(ssn_conf['instnace_ip']))
+            logging.info("SSN Hostname: {}".format(ssn_conf['instance_dns_name']))
         else:
-            print("SSN Private IP address: {}".format(ssn_conf['instnace_ip']))
-        print("Key name: {}".format(os.environ['conf_key_name']))
-        print("VPC Name: {}".format(ssn_conf['vpc_name']))
-        print("Subnet Name: {}".format(ssn_conf['subnet_name']))
-        print("Security groups Names: {}".format(ssn_conf['security_group_name']))
-        print("SSN instance size: {}".format(os.environ['azure_ssn_instance_size']))
+            logging.info("SSN Private IP address: {}".format(ssn_conf['instnace_ip']))
+        logging.info("Key name: {}".format(os.environ['conf_key_name']))
+        logging.info("VPC Name: {}".format(ssn_conf['vpc_name']))
+        logging.info("Subnet Name: {}".format(ssn_conf['subnet_name']))
+        logging.info("Security groups Names: {}".format(ssn_conf['security_group_name']))
+        logging.info("SSN instance size: {}".format(os.environ['azure_ssn_instance_size']))
         ssn_conf['datalake_store_full_name'] = 'None'
         if os.environ['azure_datalake_enable'] == 'true':
             for datalake in AzureMeta.list_datalakes(ssn_conf['resource_group_name']):
                 if ssn_conf['datalake_store_name'] == datalake.tags["Name"]:
                     ssn_conf['datalake_store_full_name'] = datalake.name
-                    print("DataLake store name: {}".format(ssn_conf['datalake_store_full_name']))
-            print("DataLake shared directory name: {}".format(ssn_conf['datalake_shared_directory_name']))
-        print("Region: {}".format(ssn_conf['region']))
+                    logging.info("DataLake store name: {}".format(ssn_conf['datalake_store_full_name']))
+            logging.info("DataLake shared directory name: {}".format(ssn_conf['datalake_shared_directory_name']))
+        logging.info("Region: {}".format(ssn_conf['region']))
         jenkins_url = "http://{}/jenkins".format(ssn_conf['instance_host'])
         jenkins_url_https = "https://{}/jenkins".format(ssn_conf['instance_host'])
-        print("Jenkins URL: {}".format(jenkins_url))
-        print("Jenkins URL HTTPS: {}".format(jenkins_url_https))
-        print("DataLab UI HTTP URL: http://{}".format(ssn_conf['instance_host']))
-        print("DataLab UI HTTPS URL: https://{}".format(ssn_conf['instance_host']))
+        logging.info("Jenkins URL: {}".format(jenkins_url))
+        logging.info("Jenkins URL HTTPS: {}".format(jenkins_url_https))
+        logging.info("DataLab UI HTTP URL: http://{}".format(ssn_conf['instance_host']))
+        logging.info("DataLab UI HTTPS URL: https://{}".format(ssn_conf['instance_host']))
 
         try:
             with open('jenkins_creds.txt') as f:
-                print(f.read())
+                logging.info(f.read())
         except Exception as err:
-            print('Error: {0}'.format(err))
-            print("Jenkins is either configured already or have issues in configuration routine.")
+            logging.info('Error: {0}'.format(err))
+            logging.info("Jenkins is either already configured or has issues in the configuration routine.")
 
         with open("/root/result.json", 'w') as f:
             if os.environ['azure_datalake_enable'] == 'false':
@@ -602,7 +587,7 @@ if __name__ == "__main__":
                        "action": "Create SSN instance"}
             f.write(json.dumps(res))
 
-        print('Upload response file')
+        logging.info('Upload response file')
         params = "--instance_name {} --local_log_filepath {} --os_user {} --instance_hostname {}". \
             format(ssn_conf['instance_name'], local_log_filepath, ssn_conf['datalab_ssh_user'], ssn_conf['instnace_ip'])
         subprocess.run("~/scripts/{}.py {}".format('upload_response_file', params), shell=True, check=True)
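
A note on the hunk above: the upload_response_file call still passes local_log_filepath, whose definition was removed together with the logging.basicConfig block at the top of this file. Unless the variable is reintroduced elsewhere in ssn_configure.py (or exposed by the new datalab.logger module), something along these lines would be needed to keep that call working; the snippet below only rebuilds the same path the removed block used to compute and is illustrative, not part of the commit:

    import os

    # Same convention as the removed block: /logs/<resource>/<resource>_<request_id>.log
    local_log_filepath = "/logs/{0}/{0}_{1}.log".format(os.environ['conf_resource'],
                                                         os.environ['request_id'])
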
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_datalake.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_datalake.py
index 471d78c..228c40d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_datalake.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_datalake.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--datalake_name', type=str, default='')
@@ -42,14 +43,14 @@ if __name__ == "__main__":
         for datalake in AzureMeta().list_datalakes(args.resource_group_name):
             if datalake["Name"] == datalake.tags["Name"]:
                 check_datalake = True
-                print("REQUESTED DATA LAKE {} ALREADY EXISTS".format(datalake.name))
+                logging.info("REQUESTED DATA LAKE {} ALREADY EXISTS".format(datalake.name))
         if not check_datalake:
             datalake_name = id_generator().lower()
-            print("Creating DataLake {}.".format(datalake_name))
+            logging.info("Creating DataLake {}.".format(datalake_name))
             datalake = AzureActions().create_datalake_store(args.resource_group_name, datalake_name, args.region,
                                                             datalake_tags)
-            print("DATA LAKE {} has been created".format(datalake_name))
+            logging.info("DATA LAKE {} has been created".format(datalake_name))
 
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
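
The recurring change in this series is dropping the per-script "import logging" plus logging.basicConfig(...) boilerplate and importing an already-configured logger via "from datalab.logger import logging". The patch does not show the content of datalab/logger.py; a minimal sketch of what such a shared module could look like, assuming it simply centralizes the basicConfig call the individual scripts used to carry, is:

    # Hypothetical datalab/logger.py -- a sketch only, not the module shipped in the repo.
    import logging
    import os

    local_log_filename = "{}_{}.log".format(os.environ.get('conf_resource', 'unknown'),
                                            os.environ.get('request_id', 'manual'))
    local_log_filepath = "/logs/{}/{}".format(os.environ.get('conf_resource', 'unknown'),
                                              local_log_filename)

    # Same format and level the individual scripts configured before this change.
    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                        level=logging.DEBUG,
                        filename=local_log_filepath)

With something like this in place, "from datalab.logger import logging" hands every script the same configured stdlib module, which is why the converted scripts can call logging.info(...) with no setup of their own.
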
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_peering.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_peering.py
index a827c0a..be956ba 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_peering.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_peering.py
@@ -26,6 +26,7 @@ import sys
 import time
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--source_resource_group_name', type=str, default='')
@@ -49,7 +50,7 @@ if __name__ == "__main__":
             args.source_virtual_network_name,
         ).id
 
-        print("Creating Virtual Network peering {} and {}".format(source_virtual_network_peering_name, destination_virtual_network_peering_name))
+        logging.info("Creating Virtual Network peering {} and {}".format(source_virtual_network_peering_name, destination_virtual_network_peering_name))
         AzureActions().create_virtual_network_peerings(
                 args.source_resource_group_name,
                 args.source_virtual_network_name,
@@ -62,5 +63,5 @@ if __name__ == "__main__":
                 source_vnet_id)
         time.sleep(250)
     except Exception as err:
-        print("Error creating vpc peering: " + str(err))
+        logging.error("Error creating vpc peering: " + str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_resource_group.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_resource_group.py
index 4463350..e8e809a 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_resource_group.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_resource_group.py
@@ -24,6 +24,7 @@
 import argparse
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--resource_group_name', type=str, default='')
@@ -33,10 +34,10 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.resource_group_name != '':
         if AzureMeta().get_resource_group(args.resource_group_name):
-            print("REQUESTED RESOURCE GROUP {} EXISTS".format(args.resource_group_name))
+            logging.info("REQUESTED RESOURCE GROUP {} EXISTS".format(args.resource_group_name))
         else:
-            print("Creating Resource Group {}".format(args.resource_group_name))
+            logging.info("Creating Resource Group {}".format(args.resource_group_name))
             AzureActions().create_resource_group(args.resource_group_name, args.region)
     else:
-        print("Resource group name can't be empty.")
+        logging.error("Resource group name can't be empty.")
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_vpc.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_vpc.py
index 015bace..abfca0d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_vpc.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_vpc.py
@@ -24,6 +24,7 @@
 import argparse
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--vpc_name', type=str, default='')
@@ -35,10 +36,10 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.vpc_name != '':
         if AzureMeta().get_vpc(args.resource_group_name, args.vpc_name):
-            print("REQUESTED VIRTUAL NETWORK {} EXISTS".format(args.vpc_name))
+            logging.info("REQUESTED VIRTUAL NETWORK {} EXISTS".format(args.vpc_name))
         else:
-            print("Creating Virtual Network {}".format(args.vpc_name))
+            logging.info("Creating Virtual Network {}".format(args.vpc_name))
             AzureActions().create_vpc(args.resource_group_name, args.vpc_name, args.region, args.vpc_cidr)
     else:
-        print("VPC name can't be empty.")
+        logging.error("VPC name can't be empty.")
         sys.exit(1)
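
ssn_create_resource_group.py and ssn_create_vpc.py above use the same level convention: progress and idempotency messages ("... EXISTS", "Creating ...") go to logging.info, while a missing mandatory argument is reported with logging.error before sys.exit(1). A generic illustration of that convention (names invented for the example, not repository code):

    import logging
    import sys

    def require_arg(value, label):
        if not value:
            logging.error("%s can't be empty.", label)  # fatal problem -> error, then exit
            sys.exit(1)
        logging.info("Using %s: %s", label, value)       # normal progress -> info
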
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
index 38040b6..42a5799 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,11 +34,6 @@ from Crypto.PublicKey import RSA
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -46,7 +41,7 @@ if __name__ == "__main__":
         ssn_conf['instance'] = 'ssn'
 
         logging.info('[DERIVING NAMES]')
-        print('[DERIVING NAMES]')
+        logging.info('[DERIVING NAMES]')
         # Verify vpc deployment
         if os.environ['conf_network_type'] == 'private' and not os.environ.get('azure_vpc_name') \
                 and not os.environ.get('azure_source_vpc_name'):
@@ -103,10 +98,8 @@ if __name__ == "__main__":
     try:
         if 'azure_resource_group_name' in os.environ:
             logging.info('Resource group predefined')
-            print('Resource group predefined')
         else:
             logging.info('[CREATING RESOURCE GROUP]')
-            print("[CREATING RESOURCE GROUP]")
             params = "--resource_group_name {} --region {}".format(ssn_conf['resource_group_name'], ssn_conf['region'])
             subprocess.run("~/scripts/{}.py {}".format('ssn_create_resource_group', params), shell=True, check=True)
     except Exception as err:
@@ -117,10 +110,8 @@ if __name__ == "__main__":
     try:
         if 'azure_vpc_name' in os.environ:
             logging.info('VPC predefined')
-            print('VPC predefined')
         else:
             logging.info('[CREATING VIRTUAL NETWORK]')
-            print("[CREATING VIRTUAL NETWORK]")
             params = "--resource_group_name {} --vpc_name {} --region {} --vpc_cidr {}".format(
                 ssn_conf['resource_group_name'], ssn_conf['vpc_name'], ssn_conf['region'], ssn_conf['vpc_cidr'])
             subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True, check=True)
@@ -137,10 +128,8 @@ if __name__ == "__main__":
     try:
         if 'azure_subnet_name' in os.environ:    
             logging.info('Subnet predefined')
-            print('Subnet predefined')
         else:
             logging.info('[CREATING SUBNET]')
-            print("[CREATING SUBNET]")
             params = "--resource_group_name {} --vpc_name {} --region {} --vpc_cidr {} --subnet_name {} --prefix {}".\
                 format(ssn_conf['resource_group_name'], ssn_conf['vpc_name'], ssn_conf['region'],
                        ssn_conf['vpc_cidr'], ssn_conf['subnet_name'], ssn_conf['subnet_prefix'])
@@ -154,14 +143,13 @@ if __name__ == "__main__":
             if 'azure_resource_group_name' not in os.environ:
                 AzureActions.remove_resource_group(ssn_conf['resource_group_name'], ssn_conf['region'])
         except Exception as err:
-            print("Resources hasn't been removed: {}".format(str(err)))
+            logging.info("Resources hasn't been removed: {}".format(str(err)))
             datalab.fab.append_result("Resources hasn't been removed.", str(err))
         sys.exit(1)
     
     try:
         if 'azure_vpc_name' not in os.environ and os.environ['conf_network_type'] == 'private':
             logging.info('[CREATING VPC PEERING]')
-            print("[CREATING VPC PEERING]")
             params = "--source_resource_group_name {} --destination_resource_group_name {} " \
                      "--source_virtual_network_name {} --destination_virtual_network_name {}".format(
                       ssn_conf['source_resource_group_name'], ssn_conf['resource_group_name'],
@@ -175,7 +163,7 @@ if __name__ == "__main__":
             if 'azure_resource_group_name' not in os.environ:
                 AzureActions.remove_resource_group(ssn_conf['resource_group_name'], ssn_conf['region'])
         except Exception as err:
-            print("Resources hasn't been removed: " + str(err))
+            logging.info("Resources hasn't been removed: " + str(err))
             datalab.fab.append_result("Resources hasn't been removed.", str(err))
         datalab.fab.append_result("Failed to create VPC peering.", str(err))
         sys.exit(1)
@@ -183,10 +171,8 @@ if __name__ == "__main__":
     try:
         if 'azure_security_group_name' in os.environ:
             logging.info('Security group predefined')
-            print('Security group predefined')
         else:
             logging.info('[CREATING SECURITY GROUP]')
-            print("[CREATING SECURITY GROUP]")
             list_rules = [
                 {
                     "name": "in-1",
@@ -249,14 +235,13 @@ if __name__ == "__main__":
             if 'azure_resource_group_name' not in os.environ:
                 AzureActions.remove_resource_group(ssn_conf['resource_group_name'], ssn_conf['region'])
         except Exception as err:
-            print("Resources hasn't been removed: " + str(err))
+            logging.info("Resources hasn't been removed: " + str(err))
             datalab.fab.append_result("Resources hasn't been removed.", str(err))
         sys.exit(1)
 
     if os.environ['azure_datalake_enable'] == 'true':
         try:
             logging.info('[CREATE DATA LAKE STORE]')
-            print('[CREATE DATA LAKE STORE]')
             params = "--datalake_name {} --datalake_tags '{}' --resource_group_name {} --region {}". \
                      format(ssn_conf['datalake_store_name'], json.dumps(ssn_conf['datalake_store_tags']),
                             ssn_conf['resource_group_name'], ssn_conf['region'])
@@ -267,7 +252,6 @@ if __name__ == "__main__":
                 raise Exception
 
             logging.info('[CREATE DATA LAKE SHARED DIRECTORY]')
-            print('[CREATE DATA LAKE SHARED DIRECTORY]')
             params = "--resource_group_name {} --datalake_name {} --directory_name {} --service_base_name {} --ad_group {}". \
                 format(ssn_conf['resource_group_name'], ssn_conf['datalake_store_name'],
                        ssn_conf['datalake_shared_directory_name'], ssn_conf['service_base_name'],
@@ -303,7 +287,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SSN INSTANCE]')
-        print('[CREATE SSN INSTANCE]')
         params = "--instance_name {} --instance_size {} --region {} --vpc_name {} --network_interface_name {} \
             --security_group_name {} --subnet_name {} --service_base_name {} --resource_group_name {} \
             --datalab_ssh_user_name {} --public_ip_name {} --public_key '''{}''' --primary_disk_size {} \
@@ -321,7 +304,7 @@ if __name__ == "__main__":
         try:
             AzureActions.remove_instance(ssn_conf['resource_group_name'], ssn_conf['instance_name'])
         except:
-            print("The instance {} hasn't been created".format(ssn_conf['instance_name']))
+            logging.info("The instance {} hasn't been created".format(ssn_conf['instance_name']))
         for datalake in AzureMeta.list_datalakes(ssn_conf['resource_group_name']):
             if ssn_conf['datalake_store_name'] == datalake.tags["Name"]:
                 AzureActions.delete_datalake_store(ssn_conf['resource_group_name'], datalake.name)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_terminate.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_terminate.py
index 0423aac..865ad37 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_terminate.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.meta_lib
 import datalab.ssn_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,99 +34,99 @@ from fabric import *
 
 
 def terminate_ssn_node(resource_group_name, service_base_name, vpc_name, region):
-    print("Terminating instances")
+    logging.info("Terminating instances")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "SBN" in vm.tags and service_base_name == vm.tags["SBN"]:
                 AzureActions.remove_instance(resource_group_name, vm.name)
-                print("Instance {} has been terminated".format(vm.name))
+                logging.info("Instance {} has been terminated".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to terminate instances", str(err))
         sys.exit(1)
 
-    print("Removing network interfaces")
+    logging.info("Removing network interfaces")
     try:
         for network_interface in AzureMeta.list_network_interfaces(resource_group_name):
             if "SBN" in network_interface.tags and service_base_name == network_interface.tags["SBN"]:
                 AzureActions.delete_network_if(resource_group_name, network_interface.name)
-                print("Network interface {} has been removed".format(network_interface.name))
+                logging.info("Network interface {} has been removed".format(network_interface.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove network interfaces", str(err))
         sys.exit(1)
 
-    print("Removing static public IPs")
+    logging.info("Removing static public IPs")
     try:
         for static_public_ip in AzureMeta.list_static_ips(resource_group_name):
             if "SBN" in static_public_ip.tags and service_base_name == static_public_ip.tags["SBN"]:
                 AzureActions.delete_static_public_ip(resource_group_name, static_public_ip.name)
-                print("Static public IP {} has been removed".format(static_public_ip.name))
+                logging.info("Static public IP {} has been removed".format(static_public_ip.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove static IPs", str(err))
         sys.exit(1)
 
-    print("Removing disks")
+    logging.info("Removing disks")
     try:
         for disk in AzureMeta.list_disks(resource_group_name):
             if "SBN" in disk.tags and service_base_name == disk.tags["SBN"]:
                 AzureActions.remove_disk(resource_group_name, disk.name)
-                print("Disk {} has been removed".format(disk.name))
+                logging.info("Disk {} has been removed".format(disk.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove disks", str(err))
         sys.exit(1)
 
-    print("Removing storage accounts")
+    logging.info("Removing storage accounts")
     try:
         for storage_account in AzureMeta.list_storage_accounts(resource_group_name):
             if "SBN" in storage_account.tags and service_base_name == storage_account.tags["SBN"]:
                 AzureActions.remove_storage_account(resource_group_name, storage_account.name)
-                print("Storage account {} has been terminated".format(storage_account.name))
+                logging.info("Storage account {} has been terminated".format(storage_account.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove storage accounts", str(err))
         sys.exit(1)
 
-    print("Removing Data Lake Store")
+    logging.info("Removing Data Lake Store")
     try:
         for datalake in AzureMeta.list_datalakes(resource_group_name):
             if "SBN" in datalake.tags and service_base_name == datalake.tags["SBN"]:
                 AzureActions.delete_datalake_store(resource_group_name, datalake.name)
-                print("Data Lake Store {} has been terminated".format(datalake.name))
+                logging.info("Data Lake Store {} has been terminated".format(datalake.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove Data Lake", str(err))
         sys.exit(1)
 
-    print("Removing images")
+    logging.info("Removing images")
     try:
         for image in AzureMeta.list_images():
             if "SBN" in image.tags and service_base_name == image.tags["SBN"]:
                 AzureActions.remove_image(resource_group_name, image.name)
-                print("Image {} has been removed".format(image.name))
+                logging.info("Image {} has been removed".format(image.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove images", str(err))
         sys.exit(1)
 
-    print("Removing security groups")
+    logging.info("Removing security groups")
     try:
         for sg in AzureMeta.network_client.network_security_groups.list(resource_group_name):
             if "SBN" in sg.tags and service_base_name == sg.tags["SBN"]:
                 AzureActions.remove_security_group(resource_group_name, sg.name)
-                print("Security group {} has been terminated".format(sg.name))
+                logging.info("Security group {} has been terminated".format(sg.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove security groups", str(err))
         sys.exit(1)
 
     if 'azure_vpc_name' in os.environ:
-        print("Removing subnets in predefined VPC")
+        logging.info("Removing subnets in predefined VPC")
         try:
             for subnet in AzureMeta.list_subnets(resource_group_name, os.environ['azure_vpc_name']):
                 subnet_name = str(subnet)[str(subnet).find("'name': '") + 9 : str(subnet).find("', 'etag':")]
                 if service_base_name in subnet_name:
                     AzureActions.remove_subnet(resource_group_name, os.environ['azure_vpc_name'], subnet_name)
-                    print("Subnet {} has been removed from VPC {}".format(subnet_name, os.environ['azure_vpc_name']))
+                    logging.info("Subnet {} has been removed from VPC {}".format(subnet_name, os.environ['azure_vpc_name']))
         except Exception as err:
             datalab.fab.append_result("Failed to remove subnets in predefined VPC", str(err))
             sys.exit(1)
 
-    print("Removing rules in predefined edge security group")
+    logging.info("Removing rules in predefined edge security group")
     try:
         if 'azure_edge_security_group_name' in os.environ:
             for rule in AzureMeta.list_security_group_rules(resource_group_name, os.environ['azure_edge_security_group_name']):
@@ -134,31 +134,30 @@ def terminate_ssn_node(resource_group_name, service_base_name, vpc_name, region)
                 if service_base_name in rule_name:
                     AzureActions.remove_security_rules(os.environ['azure_edge_security_group_name'],
                                                resource_group_name, rule_name)
-                    print("Rule {} is removed".format(rule_name))
+                    logging.info("Rule {} is removed".format(rule_name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove rules in predefined edge security group", str(err))
         sys.exit(1)
 
-    print("Removing VPC")
+    logging.info("Removing VPC")
     try:
         if AzureMeta.get_vpc(resource_group_name, service_base_name + '-vpc'):
             AzureActions.remove_vpc(resource_group_name, vpc_name)
-            print("VPC {} has been terminated".format(vpc_name))
+            logging.info("VPC {} has been terminated".format(vpc_name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove VPC", str(err))
         sys.exit(1)
 
-    print("Removing Resource Group")
+    logging.info("Removing Resource Group")
     try:
         if AzureMeta.get_resource_group(resource_group_name) and resource_group_name == '{}-resource-group'.format(service_base_name):
             AzureActions.remove_resource_group(resource_group_name, region)
-            print("Resource group {} has been terminated".format(resource_group_name))
+            logging.info("Resource group {} has been terminated".format(resource_group_name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove resource group", str(err))
         sys.exit(1)
 
     try:
-        print('[KEYCLOAK SSN CLIENT DELETE]')
         logging.info('[KEYCLOAK SSN CLIENT DELETE]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             os.environ['keycloak_auth_server_url'])
@@ -193,19 +192,14 @@ def terminate_ssn_node(resource_group_name, service_base_name, vpc_name, region)
             headers={"Authorization": "Bearer {}".format(keycloak_token.get("access_token")),
                      "Content-Type": "application/json"})
     except Exception as err:
-        print("Failed to remove ssn client from Keycloak", str(err))
+        logging.info("Failed to remove ssn client from Keycloak: {}".format(str(err)))
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     ssn_conf = dict()
     ssn_conf['service_base_name'] = datalab.fab.replace_multi_symbols(os.environ['conf_service_base_name'][:20],
                                                                       '-', True)
@@ -216,7 +210,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE SSN]')
-        print('[TERMINATE SSN]')
         try:
             terminate_ssn_node(ssn_conf['resource_group_name'], ssn_conf['service_base_name'], ssn_conf['vpc_name'],
                                ssn_conf['region'])
@@ -231,7 +224,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": ssn_conf['service_base_name'],
                    "Action": "Terminate ssn with all service_base_name environment"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
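
Most of the handlers in ssn_terminate.py, like the rest of this patch, log only str(err), so the traceback is not written to the log file. If that context is wanted, the standard library can attach it without extra plumbing; a possible follow-up, not something this commit does:

    try:
        AzureActions.remove_instance(resource_group_name, vm.name)
    except Exception:
        # logging.exception logs at ERROR level and appends the current traceback.
        logging.exception("Failed to terminate instance %s", vm.name)
        # equivalent: logging.error("Failed to terminate instance %s", vm.name, exc_info=True)
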
diff --git a/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
index 137f355..e3f8fec 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,14 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -136,7 +128,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -154,7 +145,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON TENSOR INSTANCE]')
-        print('[CONFIGURE PROXY ON TENSOR INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -172,7 +162,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO TENSOR NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO TENSOR NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
@@ -189,7 +178,6 @@ if __name__ == "__main__":
     # installing and configuring TensorFlow and all dependencies
     try:
         logging.info('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
-        print('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
         params = "--hostname {0} --keyfile {1} " \
                  "--region {2} --os_user {3} " \
                  "--ip_address {4} --exploratory_name {5} --edge_ip {6}" \
@@ -209,7 +197,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -226,7 +213,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -242,7 +228,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --resource_group_name {} --notebook_name {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -259,11 +244,11 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'],
                                         notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -271,7 +256,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -295,7 +280,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -336,19 +320,18 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("TensorBoard URL: {}".format(tensorboard_url))
-        print("TensorBoard log dir: /var/log/tensorboard")
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("TensorBoard URL: {}".format(tensorboard_url))
+        logging.info("TensorBoard log dir: /var/log/tensorboard")
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
index 536955b..c2b9b01 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -135,7 +129,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -153,7 +146,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON ZEPPELIN INSTANCE]')
-        print('[CONFIGURE PROXY ON ZEPPELIN INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -171,7 +163,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO ZEPPELIN NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO ZEPPELIN NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}" \
             .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                     edge_instance_private_hostname)
@@ -188,7 +179,6 @@ if __name__ == "__main__":
     # installing and configuring zeppelin and all dependencies
     try:
         logging.info('[CONFIGURE ZEPPELIN NOTEBOOK INSTANCE]')
-        print('[CONFIGURE ZEPPELIN NOTEBOOK INSTANCE]')
         additional_config = {"frontend_hostname": edge_instance_private_hostname,
                              "backend_hostname": instance_hostname,
                              "backend_port": "8080",
@@ -222,7 +212,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -239,7 +228,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -255,7 +243,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --resource_group_name {} --notebook_name {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -272,11 +259,11 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'],
                                         notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -284,7 +271,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -308,7 +295,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -346,17 +332,16 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         zeppelin_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Zeppelin URL: {}".format(zeppelin_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Zeppelin URL: {}".format(zeppelin_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
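
One small point on the converted [SUMMARY] blocks in tensor_configure.py and zeppelin_configure.py: they keep the eager str.format() calls from the print() versions. That is functionally fine (assuming the shared logger keeps the old DEBUG level, nothing is filtered anyway), but the stdlib style is to let the logging module do the substitution lazily; an optional variant, not part of this commit:

    logging.info("Instance name: %s", notebook_config['instance_name'])
    logging.info("Private IP: %s", ip_address)
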

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org


[incubator-datalab] 01/04: [DATALAB-2409]: replaced print with logging in all src/ .py scripts except in fabfiles and general/

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 1638c1d715a2c8200afcd9b2660184965639f6aa
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Oct 11 15:42:43 2021 +0300

    [DATALAB-2409]: replaced print with logging in all src/ .py scripts except in fabfiles and general/
---
 .../src/base/scripts/configure_keycloak.py         |  6 +-
 .../src/base/scripts/create_ssh_user.py            |  7 +-
 .../src/base/scripts/install_user_key.py           | 17 ++--
 .../src/dataengine/scripts/configure_dataengine.py | 51 ++++++------
 .../scripts/configure_deep_learning_node.py        | 57 ++++++-------
 .../src/edge/scripts/configure_http_proxy.py       |  5 +-
 .../edge/scripts/configure_nginx_reverse_proxy.py  | 16 +---
 .../src/edge/scripts/reupload_ssh_key.py           | 12 +--
 .../src/general/lib/aws/actions_lib.py             |  2 +-
 .../src/general/lib/aws/meta_lib.py                |  2 +-
 .../src/general/lib/azure/actions_lib.py           |  2 +-
 .../src/general/lib/azure/meta_lib.py              |  2 +-
 .../src/general/lib/gcp/actions_lib.py             |  2 +-
 .../src/general/lib/gcp/meta_lib.py                |  2 +-
 .../src/general/lib/os/debian/ssn_lib.py           |  1 +
 .../src/jupyter/scripts/configure_jupyter_node.py  | 51 ++++++------
 .../scripts/configure_jupyterlab_node.py           | 17 ++--
 .../src/project/scripts/configure_http_proxy.py    |  5 +-
 .../src/project/scripts/configure_nftables.py      |  5 +-
 .../scripts/configure_nginx_reverse_proxy.py       | 18 ++--
 .../src/project/scripts/reupload_ssh_key.py        | 12 +--
 .../src/rstudio/scripts/configure_rstudio_node.py  | 31 +++----
 .../src/ssn/scripts/backup.py                      | 37 +++++----
 .../src/ssn/scripts/configure_billing.py           | 11 +--
 .../src/ssn/scripts/configure_conf_file.py         |  9 +-
 .../src/ssn/scripts/configure_docker.py            | 31 +++----
 .../src/ssn/scripts/configure_gitlab.py            | 23 ++---
 .../src/ssn/scripts/configure_mongo.py             |  7 +-
 .../src/ssn/scripts/configure_ssn_node.py          | 49 +++++------
 .../src/ssn/scripts/configure_ui.py                | 26 +++---
 .../src/ssn/scripts/docker_build.py                |  3 +-
 .../src/ssn/scripts/gitlab_deploy.py               | 41 ++++-----
 .../src/ssn/scripts/resource_status.py             |  5 +-
 .../src/ssn/scripts/restore.py                     | 97 +++++++++++-----------
 .../src/ssn/scripts/upload_response_file.py        |  8 +-
 .../superset/scripts/configure_superset_node.py    | 15 ++--
 .../scripts/configure_tensor-rstudio_node.py       | 37 +++++----
 .../src/tensor/scripts/configure_tensor_node.py    | 43 +++++-----
 .../zeppelin/scripts/configure_zeppelin_node.py    | 51 ++++++------
 39 files changed, 404 insertions(+), 412 deletions(-)

diff --git a/infrastructure-provisioning/src/base/scripts/configure_keycloak.py b/infrastructure-provisioning/src/base/scripts/configure_keycloak.py
index 614b812..ecba5d6 100644
--- a/infrastructure-provisioning/src/base/scripts/configure_keycloak.py
+++ b/infrastructure-provisioning/src/base/scripts/configure_keycloak.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import argparse
-import logging
+from datalab.logger import logging
 import requests
 import uuid
 from datalab.actions_lib import *
@@ -47,7 +47,6 @@ args = parser.parse_args()
 ##############
 if __name__ == "__main__":
     try:
-        print('[CONFIGURE KEYCLOAK]')
         logging.info('[CONFIGURE KEYCLOAK]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             args.keycloak_auth_server_url)
@@ -70,7 +69,6 @@ if __name__ == "__main__":
         keycloak_client_id = str(uuid.uuid4())
         if args.hostname == '':
             keycloak_redirectUris = 'https://{0}/*,http://{0}/*'.format(args.instance_public_ip).lower().split(',')
-            print(keycloak_redirectUris)
         else:
             keycloak_redirectUris = 'https://{0}/*,http://{0}/*,https://{1}/*,http://{1}/*'.format(
                 args.instance_public_ip, args.hostname).lower().split(',')
@@ -98,5 +96,5 @@ if __name__ == "__main__":
             append_result("Failed to configure keycloak.")
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to configure keycloak.", str(err))
\ No newline at end of file
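
In the configure_keycloak.py hunk above, the bare print(keycloak_redirectUris) debug line is dropped outright rather than converted. If that output is still useful when troubleshooting redirect URIs, debug level would be its natural home; a possible follow-up, not part of this commit:

    logging.debug("Keycloak redirectUris: %s", keycloak_redirectUris)
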
diff --git a/infrastructure-provisioning/src/base/scripts/create_ssh_user.py b/infrastructure-provisioning/src/base/scripts/create_ssh_user.py
index 183295c..0b7c6c7 100644
--- a/infrastructure-provisioning/src/base/scripts/create_ssh_user.py
+++ b/infrastructure-provisioning/src/base/scripts/create_ssh_user.py
@@ -30,6 +30,7 @@ import sys
 import time
 import traceback
 from patchwork.files import exists
+from datalab.logger import logging
 from patchwork import files
 
 parser = argparse.ArgumentParser()
@@ -55,13 +56,13 @@ def ensure_ssh_user(initial_user, os_user, sudo_group):
         conn.sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.initial_user, args.keyfile)
-    print("Creating ssh user: {}".format(args.os_user))
+    logging.info("Creating ssh user: {}".format(args.os_user))
     try:
         ensure_ssh_user(args.initial_user, args.os_user, args.sudo_group)
     except Exception as err:
-        print('Failed to create ssh user', str(err))
+        logging.error('Failed to create ssh user: {}'.format(str(err)))
         sys.exit(1)
     conn.close()
diff --git a/infrastructure-provisioning/src/base/scripts/install_user_key.py b/infrastructure-provisioning/src/base/scripts/install_user_key.py
index 2e68abd..ef29684 100644
--- a/infrastructure-provisioning/src/base/scripts/install_user_key.py
+++ b/infrastructure-provisioning/src/base/scripts/install_user_key.py
@@ -26,6 +26,7 @@ import json
 import sys
 import subprocess
 from datalab.fab import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -45,38 +46,38 @@ def copy_key(config):
         user_key = '{}{}.pub'.format(
             config.get('user_keydir'),
             config.get('user_keyname'))
-        print(user_key)
+        logging.info(user_key)
         if 'user_key' not in config or config.get('user_key') == None:
             key = open('{0}'.format(user_key)).read()
         else:
             key = config.get('user_key')
         conn.sudo('echo "{0}" >> /home/{1}/.ssh/authorized_keys'.format(key, args.user))
     except:
-        print('No user key')
+        logging.error('No user key')
 
 ##############
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
         deeper_config = json.loads(args.additional_config)
     except:
-        print('Fail connection')
+        logging.error('Fail connection')
         sys.exit(2)
     try:
-        print("Ensuring safest ssh ciphers")
+        logging.info("Ensuring safest ssh ciphers")
         ensure_ciphers()
     except:
-        print('Faild to install safest ssh ciphers')
+        logging.error('Failed to install safest ssh ciphers')
 
-    print("Installing users key...")
+    logging.info("Installing users key...")
     try:
         copy_key(deeper_config)
         #conn.close()
     except:
-        print("Users keyfile {0} could not be found at {1}/{0}".format(args.keyfile, deeper_config['user_keydir']))
+        logging.error("Users keyfile {0} could not be found at {1}/{0}".format(args.keyfile, deeper_config['user_keydir']))
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
index 4de0ca3..1c89423 100644
--- a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
+++ b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -116,12 +117,12 @@ def start_spark(os_user, master_ip, node):
 
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
@@ -129,77 +130,77 @@ if __name__ == "__main__":
         sys.exit(1)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
     if os.environ['application'] in ('jupyter', 'zeppelin'):
-        print("Install Scala")
+        logging.info("Install Scala")
         ensure_scala(scala_link, args.scala_version, args.os_user)
     if (os.environ['application'] in ('jupyter', 'zeppelin')
         and os.environ['notebook_r_enabled'] == 'true') \
             or os.environ['application'] in ('rstudio', 'tensor-rstudio'):
-        print("Installing R")
+        logging.info("Installing R")
         ensure_r(args.os_user, r_libs)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
     if os.environ['application'] == 'zeppelin':
-        print("Install python3 specific version")
+        logging.info("Install python3 specific version")
         ensure_python3_specific_version(python3_version, args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install Spark")
+    logging.info("Install Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path)
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir, '')
 
     # INSTALL TENSORFLOW AND OTHER DEEP LEARNING LIBRARIES
     if os.environ['application'] in ('tensor', 'tensor-rstudio', 'deeplearning'):
-        print("Installing TensorFlow")
+        logging.info("Installing TensorFlow")
         install_tensor(args.os_user, cuda_version, cuda_file_name,
                        cudnn_version, cudnn_file_name, tensorflow_version,
                        templates_dir, nvidia_version)
-        print("Install Theano")
+        logging.info("Install Theano")
         install_theano(args.os_user, theano_version)
-        print("Installing Keras")
+        logging.info("Installing Keras")
         install_keras(args.os_user, keras_version)
 
     # INSTALL DEEP LEARNING FRAMEWORKS
     if os.environ['application'] == 'deeplearning':
-        print("Installing Caffe2")
+        logging.info("Installing Caffe2")
         install_caffe2(args.os_user, caffe2_version, cmake_version)
-        #print("Installing Torch")
+        #logging.info("Installing Torch")
         #install_torch(args.os_user)
-        print("Install CNTK Python library")
+        logging.info("Install CNTK Python library")
         install_cntk(args.os_user, cntk_version)
-        print("Installing MXNET")
+        logging.info("Installing MXNET")
         install_mxnet(args.os_user, mxnet_version)
 
     # START SPARK CLUSTER
     if args.node_type == 'master':
-        print("Starting Spark master")
+        logging.info("Starting Spark master")
         start_spark(args.os_user, args.hostname, node='master')
     elif args.node_type == 'slave':
-        print("Starting Spark slave")
+        logging.info("Starting Spark slave")
         start_spark(args.os_user, args.master_ip, node='slave')
 
     # INSTALL OPTIONAL PACKAGES
     if os.environ['application'] in ('jupyter', 'zeppelin', 'tensor', 'deeplearning'):
-        print("Install additional Python packages")
+        logging.info("Install additional Python packages")
         ensure_additional_python_libs(args.os_user)
-        print("Install matplotlib")
+        logging.info("Install matplotlib")
         ensure_matplot(args.os_user)
     if os.environ['application'] == 'jupyter':
-        print("Install SBT")
+        logging.info("Install SBT")
         ensure_sbt(args.os_user)
-        print("Install Breeze")
+        logging.info("Install Breeze")
         add_breeze_library_local(args.os_user)
     if os.environ['application'] == 'zeppelin' and os.environ['notebook_r_enabled'] == 'true':
-        print("Install additional R packages")
+        logging.info("Install additional R packages")
         install_r_packages(args.os_user)
 
     # INSTALL LIVY
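A quick note on the import that every touched script now carries: instead of "import logging" plus a per-script basicConfig() call, the scripts pull an already-configured object via "from datalab.logger import logging". The shared module itself is not part of this change set, so the snippet below is only a rough sketch of what it is assumed to do (configure the stdlib logger once, reusing the format string the scripts previously set, and re-export the module); whether it writes to stdout or to the old per-resource log files is not visible from this diff.

    # datalab/logger.py -- hypothetical sketch, NOT the actual module from the repo.
    # Assumption: configure the stdlib logger once at import time and let every
    # script reuse it via "from datalab.logger import logging".
    import logging
    import sys

    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                        level=logging.DEBUG,
                        stream=sys.stdout)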
diff --git a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
index 2c7a88d..f23ee12 100644
--- a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
+++ b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
@@ -30,6 +30,7 @@ from datalab.notebook_lib import *
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -107,22 +108,22 @@ def configure_jupyterlab_at_gcp_image(os_user, exploratory_name):
 
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
             conn.sudo('touch /home/' + args.os_user + '/.ensure_dir/deep_learning')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     if os.environ['conf_cloud_provider'] == 'gcp' and os.environ['conf_deeplearning_cloud_ami'] == 'true':
-        print('Additional disk premounted by google image')
-        print('Installing nvidia drivers')
+        logging.info('Additional disk premounted by google image')
+        logging.info('Installing nvidia drivers')
         try:
             conn.sudo('/opt/deeplearning/install-driver.sh')
         except:
@@ -133,76 +134,76 @@ if __name__ == "__main__":
 
     if os.environ['conf_deeplearning_cloud_ami'] == 'false':
         # INSTALL LANGUAGES
-        print("Install Java")
+        logging.info("Install Java")
         ensure_jre_jdk(args.os_user)
-        print("Install Python 3 modules")
+        logging.info("Install Python 3 modules")
         ensure_python3_libraries(args.os_user)
 
         # INSTALL TENSORFLOW AND OTHER DEEP LEARNING LIBRARIES AND FRAMEWORKS
-        print("Install TensorFlow")
+        logging.info("Install TensorFlow")
         install_tensor(args.os_user, cuda_version, cuda_file_name,
                        cudnn_version, cudnn_file_name, tensorflow_version,
                        templates_dir, nvidia_version)
-        print("Install Theano")
+        logging.info("Install Theano")
         install_theano(args.os_user, theano_version)
-        print("Installing Keras")
+        logging.info("Installing Keras")
         install_keras(args.os_user, keras_version)
-        print("Installing Caffe2")
+        logging.info("Installing Caffe2")
         install_caffe2(args.os_user, caffe2_version, cmake_version)
-        #print("Installing Torch")
+        #logging.info("Installing Torch")
         #install_torch(args.os_user)
-        print("Install CNTK Python library")
+        logging.info("Install CNTK Python library")
         install_cntk(args.os_user, cntk_version)
-        print("Installing MXNET")
+        logging.info("Installing MXNET")
         install_mxnet(args.os_user, mxnet_version)
 
         # INSTALL JUPYTER NOTEBOOK
-        print("Install Jupyter")
+        logging.info("Install Jupyter")
         configure_jupyter(args.os_user, jupyter_conf_file, templates_dir, args.jupyter_version, args.exploratory_name)
 
         # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-        print("Install local Spark")
+        logging.info("Install local Spark")
         ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path)
-        print("Install storage jars")
+        logging.info("Install storage jars")
         ensure_local_jars(args.os_user, jars_dir)
-        print("Configure local Spark")
+        logging.info("Configure local Spark")
         configure_local_spark(jars_dir, templates_dir)
 
         # INSTALL JUPYTER KERNELS
-        print("Install pyspark local kernel for Jupyter")
+        logging.info("Install pyspark local kernel for Jupyter")
         ensure_pyspark_local_kernel(args.os_user, pyspark_local_path_dir, templates_dir, spark_version)
-        print("Install py3spark local kernel for Jupyter")
+        logging.info("Install py3spark local kernel for Jupyter")
         ensure_py3spark_local_kernel(args.os_user, py3spark_local_path_dir, templates_dir, spark_version)
-        #print("Installing ITorch kernel for Jupyter")
+        #logging.info("Installing ITorch kernel for Jupyter")
         #install_itorch(args.os_user)
 
         # INSTALL OPTIONAL PACKAGES
-        print("Installing additional Python packages")
+        logging.info("Installing additional Python packages")
         ensure_additional_python_libs(args.os_user)
-        print("Install Matplotlib")
+        logging.info("Install Matplotlib")
         ensure_matplot(args.os_user)
     elif os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['conf_cloud_provider'] != 'gcp':
         # CONFIGURE JUPYTER NOTEBOOK
-        print("Configure Jupyter")
+        logging.info("Configure Jupyter")
         configure_jupyter(args.os_user, jupyter_conf_file, templates_dir, args.jupyter_version, args.exploratory_name)
     else:
         configure_jupyterlab_at_gcp_image(args.os_user, args.exploratory_name)
 
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install Ungit")
+    logging.info("Install Ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py b/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
index 0e9034e..a7e8e53 100644
--- a/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
+++ b/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
@@ -26,6 +26,7 @@ import json
 import sys
 from fabric import *
 from datalab.fab import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -38,7 +39,7 @@ args = parser.parse_args()
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
@@ -46,6 +47,6 @@ if __name__ == "__main__":
     except:
         sys.exit(2)
 
-    print("Installing proxy for notebooks.")
+    logging.info("Installing proxy for notebooks.")
     datalab.fab.configure_http_proxy_server(deeper_config)
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py b/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
index 6513db8..58d6a49 100644
--- a/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
+++ b/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import argparse
-import logging
+from datalab.logger import logging
 import os
 import sys
 from datalab.edge_lib import install_nginx_ldap
@@ -36,20 +36,12 @@ parser.add_argument('--user', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'],
-                                               os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
     except Exception as err:
-        print("Failed establish connection. Excpeption: " + str(err))
+        logging.error("Failed establish connection. Excpeption: " + str(err))
         sys.exit(1)
 
     try:
@@ -58,6 +50,6 @@ if __name__ == "__main__":
                            os.environ['ldap_ou'], os.environ['ldap_service_password'],
                            os.environ['ldap_service_username'], os.environ['aws_iam_user'])
     except Exception as err:
-        print("Failed install nginx reverse proxy: " + str(err))
+        logging.error("Failed install nginx reverse proxy: " + str(err))
         sys.exit(1)
     conn.close()
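The except blocks above route errors through logging.error() with str(err) appended, which preserves the old print() output. Because the stdlib logger is now in play, the traceback could also be recorded; the following is only a sketch of that variant (risky_setup is a placeholder, not a DataLab function), not something this commit does:

    import logging
    import sys

    def risky_setup():
        # stand-in for install_nginx_ldap(...) and similar calls
        raise RuntimeError('ldap bind failed')

    try:
        risky_setup()
    except Exception as err:
        # logging.exception() logs at ERROR level and appends the stack trace
        logging.exception('Failed to install nginx reverse proxy: %s', err)
        sys.exit(1)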
diff --git a/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py b/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
index 4f8483b..0199f88 100644
--- a/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
+++ b/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -32,18 +32,10 @@ from datalab.meta_lib import *
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         if os.environ['conf_cloud_provider'] == 'aws':
             create_aws_config_files()
         logging.info('[REUPLOADING USER SSH KEY]')
-        print('[REUPLOADING USER SSH KEY]')
         reupload_config = dict()
         reupload_config['os_user'] = os.environ['conf_os_user']
         reupload_config['edge_user_name'] = os.environ['edge_user_name']
@@ -51,7 +43,7 @@ if __name__ == "__main__":
         reupload_config['resource_id'] = os.environ['resource_id']
         reupload_config['additional_config'] = {"user_keyname": reupload_config['edge_user_name'],
                                                 "user_keydir": os.environ['conf_key_dir']}
-        print(reupload_config)
+        logging.info(reupload_config)
         try:
             params = "--conf_resource {} --instance_id {} --os_user '{}'" \
                      " --keyfile '{}' --additional_config '{}'".format(
diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index 567c3b6..4eddcbd 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -24,7 +24,7 @@ import backoff
 import boto3
 import botocore
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import time
diff --git a/infrastructure-provisioning/src/general/lib/aws/meta_lib.py b/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
index 858c58d..4d8c397 100644
--- a/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
@@ -23,7 +23,7 @@ import datalab.actions_lib
 import backoff
 import boto3
 import json
-import logging
+from datalab.logger import logging
 import sys
 import time
 import traceback
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index 756dd27..35f8072 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -26,7 +26,7 @@ import datalab.common_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import time
diff --git a/infrastructure-provisioning/src/general/lib/azure/meta_lib.py b/infrastructure-provisioning/src/general/lib/azure/meta_lib.py
index 510e875..821d8a9 100644
--- a/infrastructure-provisioning/src/general/lib/azure/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/meta_lib.py
@@ -31,7 +31,7 @@ from azure.datalake.store import core, lib
 from azure.graphrbac import GraphRbacManagementClient
 from azure.common.credentials import ServicePrincipalCredentials
 import azure.common.exceptions as AzureExceptions
-import logging
+from datalab.logger import logging
 import traceback
 import sys
 import os
diff --git a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
index fa1d891..df608ea 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.meta_lib
 import google.auth
 import json
-import logging
+from datalab.logger import logging
 import os
 import random
 import sys
diff --git a/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py b/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
index be5d17b..1610729 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
@@ -21,7 +21,7 @@
 
 import backoff
 import google.auth
-import logging
+from datalab.logger import logging
 import os
 import re
 import sys
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
index 5439abc..81af9b1 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
@@ -29,6 +29,7 @@ import traceback
 from datalab.common_lib import manage_pkg
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 import subprocess
 
diff --git a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
index 3369ab2..6975be3 100644
--- a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
+++ b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -71,87 +72,87 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
-    print("Install Scala")
+    logging.info("Install Scala")
     ensure_scala(scala_link, args.scala_version, args.os_user)
     if os.environ['notebook_r_enabled'] == 'true':
-        print("Installing R")
+        logging.info("Installing R")
         ensure_r(args.os_user, r_libs)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
 
     # INSTALL JUPYTER NOTEBOOK
-    print("Install Jupyter")
+    logging.info("Install Jupyter")
     configure_jupyter(args.os_user, jupyter_conf_file, templates_dir, jupyter_version, args.exploratory_name)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
+    logging.info("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path)
     local_spark_scala_version = conn.run(
         'export PATH=$PATH:' + local_spark_path + 'bin/; spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"').stdout.replace(
         '\n', '')
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir)
 
     # INSTALL JUPYTER KERNELS
-    #print("Install pyspark local kernel for Jupyter")
+    #logging.info("Install pyspark local kernel for Jupyter")
     #ensure_pyspark_local_kernel(args.os_user, pyspark_local_path_dir, templates_dir, spark_version)
-    print("Install py3spark local kernel for Jupyter")
+    logging.info("Install py3spark local kernel for Jupyter")
     ensure_py3spark_local_kernel(args.os_user, py3spark_local_path_dir, templates_dir, spark_version, python_venv_path, python_venv_version)
-    print("Install Toree-Scala kernel for Jupyter")
+    logging.info("Install Toree-Scala kernel for Jupyter")
     ensure_toree_local_kernel(args.os_user, toree_link, scala_kernel_path, files_dir, local_spark_scala_version, spark_version)
     if os.environ['notebook_r_enabled'] == 'true':
-        print("Install R kernel for Jupyter")
+        logging.info("Install R kernel for Jupyter")
         ensure_r_local_kernel(spark_version, args.os_user, templates_dir, r_kernels_dir)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install ungit")
+    logging.info("Install ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     # INSTALL OPTIONAL PACKAGES
-    print("Installing additional Python packages")
+    logging.info("Installing additional Python packages")
     ensure_additional_python_libs(args.os_user)
-    print("Install Matplotlib")
+    logging.info("Install Matplotlib")
     ensure_matplot(args.os_user)
-    print("Install SBT")
+    logging.info("Install SBT")
     ensure_sbt(args.os_user)
-    print("Install Breeze")
+    logging.info("Install Breeze")
     add_breeze_library_local(args.os_user)
 
     #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
-    print("Removing unexisting kernels")
+    logging.info("Removing unexisting kernels")
     remove_unexisting_kernel(args.os_user)
 
     conn.close()
diff --git a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
index e8b5862..3a3bd7f 100644
--- a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
+++ b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -78,38 +79,38 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL DOCKER
-    print ("Install Docker")
+    logging.info ("Install Docker")
     configure_docker(args.os_user)
 
     # CONFIGURE JUPYTER FILES
-    print("Configure jupyter files")
+    logging.info("Configure jupyter files")
     ensure_jupyterlab_files(args.os_user, jupyterlab_dir, jupyterlab_image, jupyter_conf_file, jupyterlab_conf_file, args.exploratory_name, args.edge_ip)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install ungit")
+    logging.info("Install ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py b/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py
index 4af93ff..ecddaf0 100644
--- a/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py
+++ b/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py
@@ -26,6 +26,7 @@ import json
 import sys
 from fabric import *
 from datalab.fab import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -38,7 +39,7 @@ args = parser.parse_args()
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
@@ -46,7 +47,7 @@ if __name__ == "__main__":
     except:
         sys.exit(2)
 
-    print("Installing proxy for notebooks.")
+    logging.info("Installing proxy for notebooks.")
     datalab.fab.configure_http_proxy_server(deeper_config)
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/project/scripts/configure_nftables.py b/infrastructure-provisioning/src/project/scripts/configure_nftables.py
index 8fe14cd..b00e228 100644
--- a/infrastructure-provisioning/src/project/scripts/configure_nftables.py
+++ b/infrastructure-provisioning/src/project/scripts/configure_nftables.py
@@ -26,6 +26,7 @@ import json
 import sys
 from fabric import *
 from datalab.fab import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -38,7 +39,7 @@ args = parser.parse_args()
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
@@ -46,6 +47,6 @@ if __name__ == "__main__":
     except:
         sys.exit(2)
 
-    print("Configuring nftables on edge node.")
+    logging.info("Configuring nftables on edge node.")
     datalab.fab.configure_nftables(deeper_config)
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py b/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py
index 8baa5ce..472b252 100644
--- a/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py
+++ b/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import argparse
-import logging
+from datalab.logger import logging
 import os
 import sys
 from datalab.common_lib import ensure_step
@@ -40,26 +40,18 @@ parser.add_argument('--step_cert_sans', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'],
-                                               os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
     except Exception as err:
-        print("Failed establish connection. Excpeption: " + str(err))
+        logging.error("Failed establish connection. Excpeption: " + str(err))
         sys.exit(1)
     if os.environ['conf_stepcerts_enabled'] == 'true':
         try:
             ensure_step(args.user)
         except Exception as err:
-            print("Failed install step: " + str(err))
+            logging.error("Failed install step: " + str(err))
             sys.exit(1)
 
     try:
@@ -68,7 +60,7 @@ if __name__ == "__main__":
                           args.keycloak_client_id, args.keycloak_client_secret, args.user, args.hostname,
                           args.step_cert_sans)
     except Exception as err:
-        print("Failed install nginx reverse proxy: " + str(err))
+        logging.error("Failed install nginx reverse proxy: " + str(err))
         sys.exit(1)
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py b/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
index 4f8483b..0199f88 100644
--- a/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
+++ b/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -32,18 +32,10 @@ from datalab.meta_lib import *
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         if os.environ['conf_cloud_provider'] == 'aws':
             create_aws_config_files()
         logging.info('[REUPLOADING USER SSH KEY]')
-        print('[REUPLOADING USER SSH KEY]')
         reupload_config = dict()
         reupload_config['os_user'] = os.environ['conf_os_user']
         reupload_config['edge_user_name'] = os.environ['edge_user_name']
@@ -51,7 +43,7 @@ if __name__ == "__main__":
         reupload_config['resource_id'] = os.environ['resource_id']
         reupload_config['additional_config'] = {"user_keyname": reupload_config['edge_user_name'],
                                                 "user_keydir": os.environ['conf_key_dir']}
-        print(reupload_config)
+        logging.info(reupload_config)
         try:
             params = "--conf_resource {} --instance_id {} --os_user '{}'" \
                      " --keyfile '{}' --additional_config '{}'".format(
diff --git a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
index 397a22b..5ce8645 100644
--- a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
+++ b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
@@ -28,6 +28,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -67,58 +68,58 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
-    print("Install R")
+    logging.info("Install R")
     ensure_r(args.os_user, r_libs)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
 
     # INSTALL RSTUDIO
-    print("Install RStudio")
+    logging.info("Install RStudio")
     install_rstudio(args.os_user, local_spark_path, args.rstudio_pass, args.rstudio_version, python_venv_version)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
+    logging.info("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path)
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install Ungit")
+    logging.info("Install Ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address, True)
 
     #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
 
     conn.close()
diff --git a/infrastructure-provisioning/src/ssn/scripts/backup.py b/infrastructure-provisioning/src/ssn/scripts/backup.py
index dc6d91c..971551a 100644
--- a/infrastructure-provisioning/src/ssn/scripts/backup.py
+++ b/infrastructure-provisioning/src/ssn/scripts/backup.py
@@ -28,6 +28,7 @@ import sys
 import yaml
 from fabric import *
 from time import gmtime, strftime
+from datalab.logger import logging
 import subprocess
 
 parser = argparse.ArgumentParser(description="Backup script for DataLab configs, keys, certs, jars, database & logs")
@@ -72,9 +73,9 @@ def backup_prepare():
 
 def backup_configs():
     try:
-        print('Backup configs: {}'.format(args.configs))
+        logging.info('Backup configs: {}'.format(args.configs))
         if args.configs == 'skip':
-            print('Skipped config backup.')
+            logging.info('Skipped config backup.')
         elif args.configs == 'all':
             subprocess.run("find {0}{2} -name '*yml' -exec cp {3} {1}{2} \;".format(args.datalab_path, temp_folder, conf_folder,
                                                                            "{}"), shell=True, check=True)
@@ -88,9 +89,9 @@ def backup_configs():
 
 def backup_keys():
     try:
-        print('Backup keys: {}'.format(args.keys))
+        logging.info('Backup keys: {}'.format(args.keys))
         if args.keys == 'skip':
-            print('Skipped keys backup.')
+            logging.info('Skipped keys backup.')
         elif args.keys == 'all':
             subprocess.run('cp {0}* {1}keys'.format(keys_folder, temp_folder), shell=True, check=True)
         else:
@@ -103,9 +104,9 @@ def backup_keys():
 
 def backup_certs():
     try:
-        print('Backup certs: {}'.format(args.certs))
+        logging.info('Backup certs: {}'.format(args.certs))
         if args.certs == 'skip':
-            print('Skipped certs backup.')
+            logging.info('Skipped certs backup.')
         elif args.certs == 'all':
             for cert in all_certs:
                 subprocess.run('sudo cp {0}{1} {2}certs'.format(certs_folder, cert, temp_folder), shell=True, check=True)
@@ -121,9 +122,9 @@ def backup_certs():
 
 def backup_jars():
     try:
-        print('Backup jars: {}'.format(args.jars))
+        logging.info('Backup jars: {}'.format(args.jars))
         if args.jars == 'skip':
-            print('Skipped jars backup.')
+            logging.info('Skipped jars backup.')
         elif args.jars == 'all':
             for root, dirs, files in os.walk('{0}{1}'.format(args.datalab_path, jars_folder)):
                 for service in dirs:
@@ -138,7 +139,7 @@ def backup_jars():
 
 def backup_database():
     try:
-        print('Backup db: {}'.format(args.db))
+        logging.info('Backup db: {}'.format(args.db))
         if args.db:
             ssn_conf = open('{0}{1}ssn.yml'.format(args.datalab_path, conf_folder)).read()
             data = yaml.load('mongo{}'.format(ssn_conf.split('mongo')[-1]))
@@ -153,30 +154,30 @@ def backup_database():
 
 def backup_logs():
     try:
-        print('Backup logs: {}'.format(args.logs))
+        logging.info('Backup logs: {}'.format(args.logs))
         if args.logs:
-            print('Backup DataLab logs')
+            logging.info('Backup DataLab logs')
             subprocess.run('cp -R {0}* {1}logs'.format(datalab_logs_folder, temp_folder), shell=True, check=True)
-            print('Backup docker logs')
+            logging.info('Backup docker logs')
             subprocess.run("sudo find {0} -name '*log' -exec cp {2} {1}logs/docker \;".format(docker_logs_folder, temp_folder,
                                                                                      "{}"), shell=True, check=True)
             subprocess.run('sudo chown -R {0}:{0} {1}logs/docker'.format(os_user, temp_folder), shell=True, check=True)
     except:
         append_result(error='Backup logs failed.')
-        print('Backup logs failed.')
+        logging.error('Backup logs failed.')
         sys.exit(1)
 
 
 def backup_finalize():
     try:
-        print('Compressing all files to archive...')
+        logging.info('Compressing all files to archive...')
         subprocess.run('cd {0} && tar -zcf {1} .'.format(temp_folder, dest_file), shell=True, check=True)
     except Exception as err:
         append_result(error='Compressing backup failed. {}'.format(str(err)))
         sys.exit(1)
 
     try:
-        print('Clear temp folder...')
+        logging.info('Clear temp folder...')
         if temp_folder != '/':
             subprocess.run('rm -rf {}'.format(temp_folder), shell=True, check=True)
     except Exception as err:
@@ -189,12 +190,12 @@ def append_result(status='failed', error='', backup_file=''):
         res = {"status": status,
                "request_id": args.request_id}
         if status == 'failed':
-            print(error)
+            logging.error(error)
             res['error_message'] = error
         elif status == 'created':
-            print('Successfully created backup file: {}'.format(backup_file))
+            logging.info('Successfully created backup file: {}'.format(backup_file))
             res['backup_file'] = backup_file
-        print(json.dumps(res))
+        logging.info(json.dumps(res))
         result.write(json.dumps(res))
 
 
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_billing.py b/infrastructure-provisioning/src/ssn/scripts/configure_billing.py
index f3357a5..7ef7380 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_billing.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_billing.py
@@ -24,6 +24,7 @@
 import argparse
 import sys
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--cloud_provider', type=str,
@@ -141,7 +142,7 @@ def yml_billing(path):
         f.write(config_orig)
         f.close()
     except:
-        print("Could not write the target file {}".format(path))
+        logging.error("Could not write the target file {}".format(path))
         sys.exit(1)
 
 def yml_billing_app(path):
@@ -162,7 +163,7 @@ def yml_billing_app(path):
         f.write(config_orig)
         f.close()
     except:
-        print("Could not write the target file {}".format(path))
+        logging.error("Could not write the target file {}".format(path))
         sys.exit(1)
 
 
@@ -177,7 +178,7 @@ def yml_self_service(path):
         f.write(config_orig)
         f.close()
     except:
-        print("Could not write the target file {}".format(path))
+        logging.error("Could not write the target file {}".format(path))
         sys.exit(1)
 
 
@@ -185,7 +186,7 @@ def yml_self_service(path):
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure billing")
+    logging.info("Configure billing")
     # Check cloud provider
     # Access to the bucket without credentials?
     try:
@@ -194,7 +195,7 @@ if __name__ == "__main__":
             yml_billing_app(args.datalab_dir + 'conf/billing_app.yml')
         yml_self_service(args.datalab_dir + 'conf/self-service.yml')
     except:
-        print('Error configure billing')
+        logging.error('Error configuring billing')
         sys.exit(1)
 
     sys.exit(0)
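The messages in backup.py and configure_billing.py keep their original str.format() calls, which behave the same under logging. The logging functions also support deferred %-style arguments, where interpolation happens only if the record is actually handled; a short, purely illustrative sketch with stand-in values:

    import logging

    configs = 'all'                                   # stands in for args.configs
    backup_file = '/opt/datalab/tmp/backup.tar.gz'    # example value

    # Equivalent to logging.info('Backup configs: {}'.format(configs)),
    # but the string is only built when the record is emitted.
    logging.info('Backup configs: %s', configs)
    logging.info('Successfully created backup file: %s', backup_file)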
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_conf_file.py b/infrastructure-provisioning/src/ssn/scripts/configure_conf_file.py
index dbcd025..bb7d42d 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_conf_file.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_conf_file.py
@@ -26,6 +26,7 @@ import argparse
 import json
 import sys
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--datalab_dir', type=str, default='')
@@ -55,18 +56,18 @@ def modify_conf_file():
             options = config.options(section)
             for option in options:
                 try:
-                    print('Trying to put variable {}_{} to conf file'.format(section, option))
+                    logging.info('Trying to put variable {}_{} to conf file'.format(section, option))
                     config.set(section, option, variables_list['{}_{}'.format(section, option)])
                 except:
-                    print('Such variable doesn`t exist!')
+                    logging.error("Such variable doesn't exist!")
                     config.remove_option(section, option)
 
         with open('{}sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.datalab_dir),
                   'w') as conf_file_final:
             config.write(conf_file_final)
     except Exception as error:
-        print('Error with modifying conf files:')
-        print(str(error))
+        logging.error('Error with modifying conf files:')
+        logging.error(str(error))
         sys.exit(1)
 
 
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
index 1e6e31d..bc64c58 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
@@ -27,6 +27,7 @@ import os
 import sys
 import time
 from datalab.ssn_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -69,7 +70,7 @@ def download_toree():
         conn.run('mv ./toree-0.5.0-incubating/lib/toree-assembly-0.5.0-incubating.jar {}toree-assembly-0.5.0.jar'.format(toree_path))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to download toree: ', str(err))
+        logging.error('Failed to download toree: ' + str(err))
         sys.exit(1)
 
 
@@ -86,7 +87,7 @@ def login_in_gcr(os_user, gcr_creds, odahu_image, datalab_path, cloud_provider):
                     conn.sudo('apt-get -y install google-cloud-sdk')
                 except Exception as err:
                     traceback.print_exc()
-                    print('Failed to install gcloud: ', str(err))
+                    logging.error('Failed to install gcloud: ' + str(err))
                     sys.exit(1)
             try:
                 host_string = '{}@{}'.format(args.os_user, args.hostname)
@@ -99,11 +100,11 @@ def login_in_gcr(os_user, gcr_creds, odahu_image, datalab_path, cloud_provider):
                      .format(odahu_image, datalab_path, cloud_provider))
             except Exception as err:
                 traceback.print_exc()
-                print('Failed to prepare odahu image: ', str(err))
+                logging.error('Failed to prepare odahu image: ' + str(err))
                 sys.exit(1)
         except Exception as err:
             traceback.print_exc()
-            print('Failed to prepare odahu image: ', str(err))
+            logging.error('Failed to prepare odahu image: ' + str(err))
             sys.exit(1)
 
 def build_docker_images(image_list):
@@ -182,7 +183,7 @@ def configure_guacamole():
         return True
     except Exception as err:
         traceback.print_exc()
-        print('Failed to configure guacamole: ', str(err))
+        logging.error('Failed to configure guacamole: ' + str(err))
         return False
 
 def status_container_removal_cron():
@@ -190,14 +191,14 @@ def status_container_removal_cron():
         conn.sudo('bash -c \'echo "*/15 * * * * datalab-user docker container prune -f --filter until=50m --filter label=edge_status" >> /etc/crontab\'')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to create admin status container removal cron: ', str(err))
+        logging.error('Failed to create admin status container removal cron: ' + str(err))
         sys.exit(1)
 
 ##############
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
@@ -205,34 +206,34 @@ if __name__ == "__main__":
     except:
         sys.exit(2)
 
-    print('Modifying configuration files')
+    logging.info('Modifying configuration files')
     try:
         modify_conf_file(args)
     except Exception as err:
-        print('Error:', str(err))
+        logging.error('Error: ' + str(err))
         sys.exit(1)
 
-    print("Downloading Apache Toree")
+    logging.info("Downloading Apache Toree")
     download_toree()
 
-    print("Installing docker daemon")
+    logging.info("Installing docker daemon")
     if not ensure_docker_daemon(args.datalab_path, args.os_user, args.region):
         sys.exit(1)
 
-    print("Login in Google Container Registry")
+    logging.info("Login in Google Container Registry")
     login_in_gcr(args.os_user, args.gcr_creds, args.odahu_image, args.datalab_path, args.cloud_provider)
 
-    print("Building Datalab images")
+    logging.info("Building Datalab images")
     count = 0
     while not build_docker_images(deeper_config) and count < 5:
         count += 1
         time.sleep(5)
 
-    print("Configuring guacamole")
+    logging.info("Configuring guacamole")
     if not configure_guacamole():
         sys.exit(1)
 
-    print("Adding cron to remove edge status containers")
+    logging.info("Adding cron to remove edge status containers")
     status_container_removal_cron()
 
     conn.close()
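A caveat for handlers like the Toree/gcloud ones above: print('Failed to download toree: ', str(err)) simply joins its arguments with a space, while the logging functions treat extra positional arguments as %-format parameters. A message without a placeholder therefore has to either concatenate str(err), as the calls above do, or pass a %s placeholder. A minimal illustration (err is just an example object):

    import logging

    err = RuntimeError('connection reset')  # example exception object

    # print-style: both arguments are joined with a space.
    print('Failed to download toree: ', str(err))

    # logging-style: extra positional arguments are %-format parameters, so the
    # message needs a placeholder (or explicit concatenation).
    logging.error('Failed to download toree: %s', err)
    logging.error('Failed to download toree: ' + str(err))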
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
index 59e70e1..b248932 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
@@ -28,6 +28,7 @@ import sys
 import subprocess
 from fabric import *
 from datalab.fab import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--keyfile', type=str, default='')
@@ -60,7 +61,7 @@ def create_user(os_user):
         conn.sudo('chmod 600 /home/{0}/.ssh/authorized_keys'.format(os_user))
         conn.sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
     except Exception as err:
-        print('Failed to install gitlab.{}'.format(str(err)))
+        logging.error('Failed to install gitlab.{}'.format(str(err)))
         sys.exit(1)
     conn.close()
 
@@ -97,15 +98,15 @@ def prepare_config():
             subprocess.run("sed -i 's/LDAP_ATTR_EMAIL/{}/g' gitlab.rb".format(os.environ['ldap_attr_email']), shell=True, check=True)
 
             subprocess.run("sed -i 's/GITLAB_ROOT_PASSWORD/{}/g' gitlab.rb".format(os.environ['gitlab_root_password']), shell=True, check=True)
-        print('Initial config is ready.')
+        logging.info('Initial config is ready.')
     except Exception as err:
-        print('Failed to install gitlab.{}'.format(str(err)))
+        logging.error('Failed to install gitlab.{}'.format(str(err)))
         sys.exit(1)
 
 
 def install_gitlab():
     try:
-        print('Installing gitlab...')
+        logging.info('Installing gitlab...')
         if os.environ['conf_os_family'] == 'debian':
             conn.sudo('curl -sS https://packages.gitlab.com/install/repositories/gitlab/gitlab-ce/script.deb.sh | sudo bash')
             conn.sudo('apt install gitlab-ce -y')
@@ -113,7 +114,7 @@ def install_gitlab():
             conn.sudo('curl -sS https://packages.gitlab.com/install/repositories/gitlab/gitlab-ce/script.rpm.sh | sudo bash')
             conn.sudo('yum install gitlab-ce -y')
         else:
-            print('Failed to install gitlab.')
+            logging.error('Failed to install gitlab.')
             raise Exception
 
         with lcd('{}tmp/gitlab'.format(os.environ['conf_datalab_path'])):
@@ -134,7 +135,7 @@ def install_gitlab():
 
         conn.sudo('gitlab-ctl reconfigure')
     except Exception as err:
-        print('Failed to install gitlab.{}'.format(str(err)))
+        logging.error('Failed to install gitlab.{}'.format(str(err)))
         sys.exit(1)
 
 
@@ -151,15 +152,15 @@ def configure_gitlab():
                     .format(proto, os.environ['gitlab_root_password'])).stdout.replace('\n','')
             data = json.loads(raw)
             if not json.loads(os.environ['gitlab_signup_enabled']):
-                print('Disabling signup...')
+                logging.info('Disabling signup...')
                 conn.run('curl -k --request PUT "{0}://localhost/api/v4/application/settings?private_token={1}&sudo=root&signup_enabled=false"'
                     .format(proto, data['private_token']))
             if not json.loads(os.environ['gitlab_public_repos']):
-                print('Disabling public repos...')
+                logging.info('Disabling public repos...')
                 conn.run('curl -k --request PUT "{0}://localhost/api/v4/application/settings?private_token={1}&sudo=root&restricted_visibility_levels=public"'
                     .format(proto, data['private_token']))
     except Exception as err:
-        print("Failed to connect to GitLab via API..{}".format(str(err)))
+        logging.error("Failed to connect to GitLab via API..{}".format(str(err)))
         sys.exit(1)
 
 
@@ -175,9 +176,9 @@ def summary():
     data['os_family'] = os.environ['conf_os_family']
     data['os_user'] = os.environ['conf_os_user']
     data['key_name'] = os.environ['conf_key_name']
-    print('[SUMMARY]')
+    logging.info('[SUMMARY]')
     for key in data:
-        print('{0}: {1}'.format(key, data[key]))
+        logging.info('{0}: {1}'.format(key, data[key]))
 
     with open('{}tmp/result/gitlab.json'.format(os.environ['conf_datalab_path']), 'w') as result:
         result.write(json.dumps(data))
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_mongo.py b/infrastructure-provisioning/src/ssn/scripts/configure_mongo.py
index c0d7f2f..acb9364 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_mongo.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_mongo.py
@@ -28,6 +28,7 @@ import time
 import yaml
 #from datalab.fab import *
 from pymongo import MongoClient
+from datalab.logger import logging
 
 path = "/etc/mongod.conf"
 outfile = "/etc/mongo_params.yml"
@@ -55,7 +56,7 @@ def add_2_yml_config(path, section, param, value):
             yaml.dump(config_orig, outfile_yml_w, default_flow_style=False)
         return True
     except:
-        print("Could not write the target file")
+        logging.error("Could not write the target file")
         return False
 
 
@@ -66,7 +67,7 @@ def read_yml_conf(path, section, param):
         result = config[section][param]
         return result
     except:
-        print("File does not exist")
+        logging.error("File does not exist")
         return ''
 
 
@@ -94,7 +95,7 @@ if __name__ == "__main__":
             command = ['service', 'mongod', 'restart']
             subprocess.call(command, shell=False)
     except:
-        print("Looks like MongoDB have already been secured")
+        logging.error("Looks like MongoDB have already been secured")
         pass_upd = False
 
     # Generating output config
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py b/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
index 03ef2f7..a4d1ef3 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
@@ -29,6 +29,7 @@ import traceback
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.ssn_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -47,7 +48,7 @@ def set_hostname(subdomain, hosted_zone_name):
         conn.sudo('hostnamectl set-hostname {0}.{1}'.format(subdomain, hosted_zone_name))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to set hostname: ', str(err))
+        logging.error('Failed to set hostname: ' + str(err))
         sys.exit(1)
 
 def set_resolve():
@@ -55,7 +56,7 @@ def set_resolve():
         conn.sudo('ln -sf /run/systemd/resolve/resolv.conf /etc/resolv.conf')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to set resolve: ', str(err))
+        logging.error('Failed to set resolve: ' + str(err))
         sys.exit(1)
 
 def cp_key(keyfile, host_string, os_user):
@@ -67,7 +68,7 @@ def cp_key(keyfile, host_string, os_user):
         conn.sudo('chmod 600 /home/' + os_user + '/keys/*.pem')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to copy key: ', str(err))
+        logging.error('Failed to copy key: ' + str(err))
         sys.exit(1)
 
 
@@ -78,7 +79,7 @@ def cp_backup_scripts(datalab_path):
         conn.run('chmod +x {0}tmp/backup.py {0}tmp/restore.py'.format(datalab_path))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to copy backup scripts: ', str(err))
+        logging.error('Failed to copy backup scripts: ' + str(err))
         sys.exit(1)
 
 
@@ -98,7 +99,7 @@ def cp_gitlab_scripts(datalab_path):
         conn.run('cd {}tmp/gitlab && sed -i "s/SERVICE_BASE_NAME/{}/g" gitlab.ini'.format(datalab_path, os.environ['conf_service_base_name']))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to copy gitlab scripts: ', str(err))
+        logging.error('Failed to copy gitlab scripts: ' + str(err))
         sys.exit(1)
 
 
@@ -123,7 +124,7 @@ def creating_service_directories(datalab_path, os_user):
             conn.sudo('chown -R ' + os_user + ':' + os_user + ' ' + datalab_path)
     except Exception as err:
         traceback.print_exc()
-        print('Failed to create service directories: ', str(err))
+        logging.error('Failed to create service directories: ', str(err))
         sys.exit(1)
 
 
@@ -191,7 +192,7 @@ def configure_ssl_certs(hostname, custom_ssl_cert):
         conn.sudo('openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to configure SSL certificates: ', str(err))
+        logging.error('Failed to configure SSL certificates: ', str(err))
         sys.exit(1)
 
 def docker_build_script():
@@ -201,7 +202,7 @@ def docker_build_script():
         conn.sudo('mv docker_build /usr/bin/docker-build')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to configure docker_build script: ', str(err))
+        logging.error('Failed to configure docker_build script: ', str(err))
         sys.exit(1)
 
 ##############
@@ -210,7 +211,7 @@ def docker_build_script():
 
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
@@ -228,31 +229,31 @@ if __name__ == "__main__":
     else:
         custom_ssl_cert = False
 
-    print('Setting resolve DNS configuration')
+    logging.info('Setting resolve DNS configuration')
     set_resolve()
 
-    print("Creating service directories.")
+    logging.info("Creating service directories.")
     creating_service_directories(args.datalab_path, args.os_user)
 
     if domain_created:
-        print("Setting hostname")
+        logging.info("Setting hostname")
         set_hostname(os.environ['ssn_subdomain'], os.environ['ssn_hosted_zone_name'])
         args.hostname = "{0}.{1}".format(os.environ['ssn_subdomain'], os.environ['ssn_hosted_zone_name'])
 
-    print("Installing nginx as frontend.")
+    logging.info("Installing nginx as frontend.")
     ensure_nginx(args.datalab_path)
 
-    print("Installing Java")
+    logging.info("Installing Java")
     ensure_java(args.os_user)
 
-    print("Configuring ssl key and cert for nginx.")
+    logging.info("Configuring ssl key and cert for nginx.")
     configure_ssl_certs(args.hostname, custom_ssl_cert)
 
-    print("Configuring nginx.")
+    logging.info("Configuring nginx.")
     configure_nginx(deeper_config, args.datalab_path, args.hostname)
 
     if os.environ['conf_letsencrypt_enabled'] == 'true':
-        print("Configuring letsencrypt certificates.")
+        logging.info("Configuring letsencrypt certificates.")
         install_certbot(args.os_user)
         if 'conf_letsencrypt_email' in os.environ:
             run_certbot(os.environ['conf_letsencrypt_domain_name'], 'ssn', os.environ['conf_letsencrypt_email'])
@@ -260,25 +261,25 @@ if __name__ == "__main__":
             run_certbot(os.environ['conf_letsencrypt_domain_name'], 'ssn')
         configure_nginx_LE(os.environ['conf_letsencrypt_domain_name'], 'ssn')
 
-    # print("Installing jenkins.")
+    # logging.info("Installing jenkins.")
     # ensure_jenkins(args.datalab_path)
 
-    # print("Configuring jenkins.")
+    # logging.info("Configuring jenkins.")
     #configure_jenkins(args.datalab_path, args.os_user, deeper_config, args.tag_resource_id)
 
-    print("Copying key")
+    logging.info("Copying key")
     cp_key(args.keyfile, host_string, args.os_user)
 
-    print("Copying backup scripts")
+    logging.info("Copying backup scripts")
     cp_backup_scripts(args.datalab_path)
 
-    print("Copying gitlab scripts & files")
+    logging.info("Copying gitlab scripts & files")
     cp_gitlab_scripts(args.datalab_path)
 
-    print("Ensuring safest ssh ciphers")
+    logging.info("Ensuring safest ssh ciphers")
     ensure_ciphers()
 
-    print("Configuring docker_build script")
+    logging.info("Configuring docker_build script")
     docker_build_script()
 
     conn.close()
\ No newline at end of file
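
Several replacements in configure_ssn_node.py keep print()'s comma-separated arguments, e.g. logging.error('Failed to set hostname: ', str(err)). Unlike print, logging applies extra positional arguments with %-style formatting, so a message without a placeholder fails to render and the error text is lost. A short sketch of the two working forms, assuming the shared logger is stdlib-compatible; the RuntimeError is only a stand-in for a real failure:

    from datalab.logger import logging  # assumed stdlib-compatible

    try:
        raise RuntimeError("name resolution failed")  # illustrative failure
    except Exception as err:
        # print('Failed to set hostname: ', err) concatenated its arguments;
        # logging needs either a lazy placeholder or an explicitly built string.
        logging.error('Failed to set hostname: %s', err)
        logging.error('Failed to set hostname: {}'.format(err))
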
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
index d9327aa..db05276 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
@@ -23,7 +23,7 @@
 
 import argparse
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -103,7 +103,7 @@ def copy_ssn_libraries():
             conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to copy ssn libraries: ', str(err))
+        logging.error('Failed to copy ssn libraries: ', str(err))
         sys.exit(1)
 
 
@@ -142,7 +142,7 @@ def configure_mongo(mongo_passwd, default_endpoint_name):
             args.datalab_path))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to configure MongoDB: ', str(err))
+        logging.error('Failed to configure MongoDB: ', str(err))
         sys.exit(1)
 
 
@@ -179,7 +179,7 @@ def build_ui():
             except:
                 conn.run('if ! grep -w -E "(ERROR)" /tmp/maven.log > /tmp/maven_error.log; then echo "no_error" > /tmp/maven_error.log;fi')
                 conn.run('cat /tmp/maven_error.log')
-                print('Failed to build Back-end: ', str(err))
+                logging.error('Failed to build Back-end: ', str(err))
                 sys.exit(1)
         conn.sudo('mkdir -p {}webapp/'.format(args.datalab_path))
         for service in ['self-service', 'provisioning-service', 'billing']:
@@ -213,7 +213,7 @@ def build_ui():
                     args.datalab_path))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to build UI: ', str(err))
+        logging.error('Failed to build UI: ', str(err))
         sys.exit(1)
 
 
@@ -221,7 +221,7 @@ def build_ui():
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
@@ -230,28 +230,28 @@ if __name__ == "__main__":
     except:
         sys.exit(2)
 
-    print("Copying DataLab libraries to SSN")
+    logging.info("Copying DataLab libraries to SSN")
     copy_ssn_libraries()
 
-    print("Installing Supervisor")
+    logging.info("Installing Supervisor")
     ensure_supervisor()
 
-    print("Installing MongoDB")
+    logging.info("Installing MongoDB")
     ensure_mongo()
 
-    print("Configuring MongoDB")
+    logging.info("Configuring MongoDB")
     configure_mongo(mongo_passwd, args.default_endpoint_name)
 
     conn.sudo('bash -c "echo DATALAB_CONF_DIR={} >> /etc/profile"'.format(datalab_conf_dir))
     conn.sudo('bash -c "echo export DATALAB_CONF_DIR >> /etc/profile"')
 
-    print("Installing build dependencies for UI")
+    logging.info("Installing build dependencies for UI")
     install_build_dep()
 
-    print("Building UI")
+    logging.info("Building UI")
     build_ui()
 
-    print("Starting Self-Service(UI)")
+    logging.info("Starting Self-Service(UI)")
     start_ss(args.keyfile, host_string, datalab_conf_dir, web_path,
              args.os_user, mongo_passwd, keystore_passwd, args.cloud_provider,
              args.service_base_name, args.tag_resource_id, args.billing_tag, args.account_id,
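
configure_ui.py swaps the stdlib import for the shared module while keeping every call site unchanged. The real datalab/logger.py is not part of this diff; the sketch below is a hypothetical version that configures the root logger once and leaves the stdlib module importable under the same name, so `from datalab.logger import logging` keeps working:

    # Hypothetical datalab/logger.py (not shown in this commit): configure handlers
    # once, then let callers import the configured stdlib module by name.
    import logging
    import sys

    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s [%(levelname)s] %(message)s',
        stream=sys.stdout,
    )
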
diff --git a/infrastructure-provisioning/src/ssn/scripts/docker_build.py b/infrastructure-provisioning/src/ssn/scripts/docker_build.py
index f0ddafe..37c6526 100644
--- a/infrastructure-provisioning/src/ssn/scripts/docker_build.py
+++ b/infrastructure-provisioning/src/ssn/scripts/docker_build.py
@@ -28,6 +28,7 @@ import subprocess
 from fabric import *
 from os.path import exists
 from os import path
+from datalab.logger import logging
 
 src_path = '/opt/datalab/sources/infrastructure-provisioning/src/'
 if sys.argv[1] == 'all':
@@ -71,7 +72,7 @@ def image_build(src_path, node):
                 subprocess.run('cd {3}; docker build --build-arg OS={0} --file general/files/{1}/{2}_Dockerfile -t docker.datalab-{2} .'.format(
                             os_family, cloud_provider, node[i], src_path), shell=True, check=True)
         except Exception as err:
-            print("Failed to build {} image".format(node[i]), str(err))
+            logging.error("Failed to build {} image".format(node[i]), str(err))
             raise Exception
     except Exception as err:
         traceback.print_exc()
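
In docker_build.py the failure branch logs and then raises a bare Exception, dropping the original error. A sketch of an equivalent path that keeps both the log record and the exception chain; the helper name and docker invocation are illustrative only:

    import subprocess
    from datalab.logger import logging  # assumed stdlib-compatible

    def build_image(dockerfile, tag, src_path='.'):
        # Log with a placeholder so the error text renders, then re-raise the
        # original CalledProcessError instead of a new bare Exception.
        try:
            subprocess.run(['docker', 'build', '--file', dockerfile, '-t', tag, src_path],
                           check=True)
        except subprocess.CalledProcessError as err:
            logging.error("Failed to build %s image: %s", tag, err)
            raise
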
diff --git a/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py b/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py
index 813ea47..8bcdd7d 100644
--- a/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py
+++ b/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py
@@ -23,9 +23,10 @@
 
 from ConfigParser import ConfigParser
 from fabric import *
+from datalab.logger import logging
 import argparse
 import boto3
-from botocore.client import Config as botoConfig`
+from botocore.client import Config as botoConfig
 import sys
 import os
 
@@ -49,7 +50,7 @@ def read_ini():
                         if var not in os.environ:
                             os.environ[var] = config.get(section, option)
     except Exception as err:
-        print('Failed to read conf file.{}'.format(str(err)))
+        logging.error('Failed to read conf file.{}'.format(str(err)))
         sys.exit(1)
 
 
@@ -70,14 +71,14 @@ def create_instance():
                                          InstanceType=os.environ['aws_instance_type'],
                                          SubnetId=os.environ['aws_subnet_id'])
         for instance in instances:
-            print('Waiting for instance {} become running.'.format(instance.id))
+            logging.info('Waiting for instance {} become running.'.format(instance.id))
             instance.wait_until_running()
             node_name = '{0}-{1}'.format(os.environ['conf_service_base_name'], os.environ['conf_node_name'])
             instance.create_tags(Tags=[{'Key': 'Name', 'Value': node_name}])
             return instance.id
         return ''
     except Exception as err:
-        print("Failed to create instance.{}".format(str(err)))
+        logging.error("Failed to create instance.{}".format(str(err)))
         sys.exit(1)
 
 
@@ -114,7 +115,7 @@ def get_ami_id(ami_name):
             raise Exception("Unable to find image id with name: " + ami_name)
         return image_id
     except Exception as err:
-        print("Failed to get AMI ID.{}".format(str(err)))
+        logging.error("Failed to get AMI ID.{}".format(str(err)))
 
 
 def create_elastic_ip(instance_id):
@@ -123,9 +124,9 @@ def create_elastic_ip(instance_id):
         response = client.allocate_address(Domain='vpc')
         allocation_id = response.get('AllocationId')
         response = client.associate_address(InstanceId=instance_id, AllocationId=allocation_id)
-        print('Association ID: {}'.format(response.get('AssociationId')))
+        logging.info('Association ID: {}'.format(response.get('AssociationId')))
     except Exception as err:
-        print('Failed to allocate elastic IP.{}'.format(str(err)))
+        logging.error('Failed to allocate elastic IP.{}'.format(str(err)))
         sys.exit(1)
 
 
@@ -137,7 +138,7 @@ def get_ec2_ip(instance_id):
         for instance in instances:
             return getattr(instance, 'public_dns_name')
     except Exception as e:
-        print('Failed to get instance IP.{}'.format(str(e)))
+        logging.error('Failed to get instance IP.{}'.format(str(e)))
         sys.exit(1)
 
 
@@ -147,7 +148,7 @@ def put_to_bucket(bucket_name, local_file, destination_file):
         with open(local_file, 'rb') as data:
             s3.upload_fileobj(data, bucket_name, destination_file, ExtraArgs={'ServerSideEncryption': 'AES256'})
     except Exception as err:
-        print('Unable to upload files to S3 bucket.{}'.format(str(err)))
+        logging.error('Unable to upload files to S3 bucket.{}'.format(str(err)))
         sys.exit(1)
 
 
@@ -156,7 +157,7 @@ def terminate_gitlab():
         ec2 = boto3.resource('ec2')
         client = boto3.client('ec2')
         node_name = '{0}-{1}'.format(os.environ['conf_service_base_name'], os.environ['conf_node_name'])
-        print('Terminating "{}" instance...'.format(node_name))
+        logging.info('Terminating "{}" instance...'.format(node_name))
         inst = ec2.instances.filter(
             Filters=[{'Name': 'instance-state-name', 'Values': ['running', 'stopped', 'pending', 'stopping']},
                      {'Name': 'tag:Name', 'Values': ['{}'.format(node_name)]}])
@@ -175,19 +176,19 @@ def terminate_gitlab():
                                     association_id = el_ip.get('AssociationId')
                                     client.disassociate_address(AssociationId=association_id)
                                     client.release_address(AllocationId=allocation_id)
-                                    print('Releasing Elastic IP: {}'.format(elastic_ip))
+                                    logging.info('Releasing Elastic IP: {}'.format(elastic_ip))
                             except:
-                                print('There is no such Elastic IP: {}'.format(elastic_ip))
+                                logging.error('There is no such Elastic IP: {}'.format(elastic_ip))
                 except Exception as err:
-                    print('There is no Elastic IP to disassociate from instance: {}'.format(instance.id), str(err))
+                    logging.error('There is no Elastic IP to disassociate from instance: {}'.format(instance.id), str(err))
                 client.terminate_instances(InstanceIds=[instance.id])
                 waiter = client.get_waiter('instance_terminated')
                 waiter.wait(InstanceIds=[instance.id])
-                print('The instance {} has been terminated successfully'.format(instance.id))
+                logging.info('The instance {} has been terminated successfully'.format(instance.id))
         else:
-            print('There are no instances with "{}" tag to terminate'.format(node_name))
+            logging.info('There are no instances with "{}" tag to terminate'.format(node_name))
     except Exception as err:
-        print('Failed to terminate gitlab instance. {}'.format(str(err)))
+        logging.error('Failed to terminate gitlab instance. {}'.format(str(err)))
 
 
 if __name__ == "__main__":
@@ -196,11 +197,11 @@ if __name__ == "__main__":
 
     if args.action == 'create':
         instance_id = create_instance()
-        print('Instance {} created.'.format(instance_id))
+        logging.info('Instance {} created.'.format(instance_id))
         create_elastic_ip(instance_id)
         os.environ['instance_id'] = instance_id
         os.environ['instance_hostname'] = get_ec2_ip(instance_id)
-        print('Instance hostname: {}'.format(os.environ['instance_hostname']))
+        logging.info('Instance hostname: {}'.format(os.environ['instance_hostname']))
 
         keyfile = '{}'.format('{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name']))
         params = '--keyfile {0} --instance_ip {1}'.format(keyfile, os.environ['instance_hostname'])
@@ -210,7 +211,7 @@ if __name__ == "__main__":
         try:
             subprocess.run('{0}/{1}.py {2}'.format(head, 'configure_gitlab', params), shell=True, check=True)
         except Exception as err:
-            print('Failed to configure gitlab. {}'.format(str(err)))
+            logging.error('Failed to configure gitlab. {}'.format(str(err)))
             terminate_gitlab()
             sys.exit(1)
 
@@ -224,4 +225,4 @@ if __name__ == "__main__":
         terminate_gitlab()
 
     else:
-        print('Unknown action. Try again.')
+        logging.error('Unknown action. Try again.')
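
gitlab_deploy.py applies the same conversion around its boto3 calls. A compact sketch of the terminate-and-wait step shown above, with the success message deferred until the waiter returns; the function name is illustrative and credentials/region are assumed to be configured in the environment:

    import boto3
    from datalab.logger import logging  # assumed stdlib-compatible

    def terminate_and_wait(instance_id):
        # Mirrors the flow above: request termination, block on the waiter,
        # then report success through the shared logger.
        client = boto3.client('ec2')
        client.terminate_instances(InstanceIds=[instance_id])
        waiter = client.get_waiter('instance_terminated')
        waiter.wait(InstanceIds=[instance_id])
        logging.info('The instance %s has been terminated successfully', instance_id)
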
diff --git a/infrastructure-provisioning/src/ssn/scripts/resource_status.py b/infrastructure-provisioning/src/ssn/scripts/resource_status.py
index 7146076..2ff4a0d 100644
--- a/infrastructure-provisioning/src/ssn/scripts/resource_status.py
+++ b/infrastructure-provisioning/src/ssn/scripts/resource_status.py
@@ -24,6 +24,7 @@ import argparse
 import sys
 import yaml
 from pymongo import MongoClient
+from datalab.logger import logging
 
 path = "/etc/mongod.conf"
 outfile = "/etc/mongo_params.yml"
@@ -41,7 +42,7 @@ def read_yml_conf(path, section, param):
         result = config[section][param]
         return result
     except:
-        print("File does not exist")
+        logging.error("File does not exist")
         return ''
 
 
@@ -59,5 +60,5 @@ if __name__ == "__main__":
     try:
         update_resource_status(args.resource, args.status)
     except:
-        print("Unable to update status for the resource {}".format(args.resource))
+        logging.error("Unable to update status for the resource {}".format(args.resource))
         sys.exit(1)
\ No newline at end of file
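
resource_status.py exits non-zero after logging when the Mongo update fails. A hypothetical sketch of that update path; the database and collection names here are assumptions for illustration, not taken from the repository:

    from pymongo import MongoClient
    from datalab.logger import logging  # assumed stdlib-compatible

    def update_resource_status(resource, status, uri='mongodb://localhost:27017'):
        # logging.exception records the reason for the failure that the bare
        # except in the __main__ block above otherwise hides.
        try:
            client = MongoClient(uri)
            client['datalabdb']['statuses'].update_one(
                {'resource': resource}, {'$set': {'status': status}}, upsert=True)
        except Exception:
            logging.exception("Unable to update status for the resource %s", resource)
            raise
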
diff --git a/infrastructure-provisioning/src/ssn/scripts/restore.py b/infrastructure-provisioning/src/ssn/scripts/restore.py
index 9cb9a98..b38fcc0 100644
--- a/infrastructure-provisioning/src/ssn/scripts/restore.py
+++ b/infrastructure-provisioning/src/ssn/scripts/restore.py
@@ -28,6 +28,7 @@ import sys
 import yaml
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser(description="Restore script for DataLab configs, keys, certs, jars & database")
 parser.add_argument('--datalab_path', type=str, default='/opt/datalab/', help='Path to DataLab. Default: /opt/datalab/')
@@ -60,7 +61,7 @@ def ask(question):
             else:
                 return False
         except:
-            print("Incorrect answer. Try again...")
+            logging.error("Incorrect answer. Try again...")
             continue
 
 
@@ -70,30 +71,30 @@ def restore_prepare():
             head, tail = os.path.split(args.file)
             temp_folder = "/tmp/{}/".format(tail.split(".")[0])
             if os.path.isdir(temp_folder):
-                print("Temporary folder with this backup already exist.")
-                print("Use folder path '{}' in --file key".format(temp_folder))
+                logging.info("Temporary folder with this backup already exist.")
+                logging.info("Use folder path '{}' in --file key".format(temp_folder))
                 raise Exception
-            print("Backup acrhive will be unpacked to: {}".format(temp_folder))
+            logging.info("Backup acrhive will be unpacked to: {}".format(temp_folder))
             subprocess.run("mkdir {}".format(temp_folder), shell=True, check=True)
             subprocess.run("tar -xf {0} -C {1}".format(backup_file, temp_folder), shell=True, check=True)
         elif os.path.isdir(backup_file):
             temp_folder = backup_file
         else:
-            print("Please, specify file or folder. Try --help for more details.")
+            logging.info("Please, specify file or folder. Try --help for more details.")
             raise Exception
-        print("Backup acrhive: {} contains following files (exclude logs):".format(backup_file))
+        logging.info("Backup acrhive: {} contains following files (exclude logs):".format(backup_file))
         subprocess.run("find {} -not -name '*log'".format(temp_folder), shell=True, check=True)
     except Exception as err:
-        print("Failed to open backup.{}".format(str(err)))
+        logging.error("Failed to open backup.{}".format(str(err)))
         sys.exit(1)
 
     try:
         if ask("Maybe you want to create backup of existing configuration before restoring?"):
             with settings(hide('everything')):
-                print("Creating new backup...")
+                logging.info("Creating new backup...")
                 subprocess.run("python3 backup.py --configs all --keys all --certs all --jar all --db", shell=True, check=True)
     except:
-        print("Failed to create new backup.")
+        logging.error("Failed to create new backup.")
         sys.exit(1)
 
     try:
@@ -102,7 +103,7 @@ def restore_prepare():
         else:
             raise Exception
     except:
-        print("Failed to stop all services. Can not continue.")
+        logging.error("Failed to stop all services. Can not continue.")
         sys.exit(1)
 
     return temp_folder
@@ -111,7 +112,7 @@ def restore_prepare():
 def restore_configs():
     try:
         if not os.path.isdir("{0}{1}".format(temp_folder, conf_folder)):
-            print("Config files are not available in this backup.")
+            logging.info("Config files are not available in this backup.")
             raise Exception
 
         configs = list()
@@ -119,12 +120,12 @@ def restore_configs():
             configs = [files for root, dirs, files in os.walk("{0}{1}".format(temp_folder, conf_folder))][0]
         else:
             configs = args.configs.split(",")
-        print("Restore configs: {}".format(configs))
+        logging.info("Restore configs: {}".format(configs))
 
         if args.configs != "skip":
             for filename in configs:
                 if not os.path.isfile("{0}{1}{2}".format(temp_folder, conf_folder, filename)):
-                    print("Config {} are not available in this backup.".format(filename))
+                    logging.info("Config {} are not available in this backup.".format(filename))
                 else:
                     if os.path.isfile("{0}{1}{2}".format(args.datalab_path, conf_folder, filename)):
                         backupfile = "{0}{1}{2}".format(temp_folder, conf_folder, filename)
@@ -133,20 +134,20 @@ def restore_configs():
                             if ask("Config {} was changed, rewrite it?".format(filename)):
                                 subprocess.run("cp -f {0} {1}".format(backupfile, destfile), shell=True, check=True)
                             else:
-                                print("Config {} was skipped.".format(destfile))
+                                logging.info("Config {} was skipped.".format(destfile))
                         else:
-                            print("Config {} was not changed. Skipped.".format(filename))
+                            logging.info("Config {} was not changed. Skipped.".format(filename))
                     else:
-                        print("Config {} does not exist. Creating.".format(filename))
+                        logging.info("Config {} does not exist. Creating.".format(filename))
                         subprocess.run("cp {0}{1}{2} {3}{1}{2}".format(temp_folder, conf_folder, filename, args.datalab_path), shell=True, check=True)
     except:
-        print("Restore configs failed.")
+        logging.error("Restore configs failed.")
 
 
 def restore_keys():
     try:
         if not os.path.isdir("{}keys".format(temp_folder)):
-            print("Key files are not available in this backup.")
+            logging.info("Key files are not available in this backup.")
             raise Exception
 
         keys = list()
@@ -154,33 +155,33 @@ def restore_keys():
             keys = [files for root, dirs, files in os.walk("{}keys".format(temp_folder))][0]
         else:
             keys = args.keys.split(",")
-        print("Restore keys: {}".format(keys))
+        logging.info("Restore keys: {}".format(keys))
 
         if args.keys != "skip":
             for filename in keys:
                 if not os.path.isfile("{0}keys/{1}".format(temp_folder, filename)):
-                    print("Key {} are not available in this backup.".format(filename))
+                    logging.info("Key {} are not available in this backup.".format(filename))
                 else:
                     if os.path.isfile("{0}{1}".format(keys_folder, filename)):
-                        print("Key {} already exist.".format(filename))
+                        logging.info("Key {} already exist.".format(filename))
                         if not filecmp.cmp("{0}keys/{1}".format(temp_folder, filename), "{0}{1}".format(keys_folder, filename)):
                             if ask("Key {} was changed, rewrite it?".format(filename)):
                                 subprocess.run("cp -f {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename), shell=True, check=True)
                             else:
-                                print("Key {} was skipped.".format(filename))
+                                logging.info("Key {} was skipped.".format(filename))
                         else:
-                            print("Key {} was not changed. Skipped.".format(filename))
+                            logging.info("Key {} was not changed. Skipped.".format(filename))
                     else:
-                        print("Key {} does not exist. Creating.".format(filename))
+                        logging.info("Key {} does not exist. Creating.".format(filename))
                         subprocess.run("cp {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename), shell=True, check=True)
     except:
-        print("Restore keys failed.")
+        logging.error("Restore keys failed.")
 
 
 def restore_certs():
     try:
         if not os.path.isdir("{}certs".format(temp_folder)):
-            print("Cert files are not available in this backup.")
+            logging.info("Cert files are not available in this backup.")
             raise Exception
 
         certs = list()
@@ -188,35 +189,35 @@ def restore_certs():
             certs = [files for root, dirs, files in os.walk("{}certs".format(temp_folder))][0]
         else:
             certs = args.certs.split(",")
-        print("Restore certs: {}".format(certs))
+        logging.info("Restore certs: {}".format(certs))
 
         if args.certs != "skip":
             for filename in certs:
                 if not os.path.isfile("{0}certs/{1}".format(temp_folder, filename)):
-                    print("Cert {} are not available in this backup.".format(filename))
+                    logging.info("Cert {} are not available in this backup.".format(filename))
                 else:
                     if os.path.isfile("{0}{1}".format(certs_folder, filename)):
-                        print("Cert {} already exist.".format(filename))
+                        logging.info("Cert {} already exist.".format(filename))
                         if not filecmp.cmp("{0}certs/{1}".format(temp_folder, filename), "{0}{1}".format(certs_folder, filename)):
                             if ask("Cert {} was changed, rewrite it?".format(filename)):
                                 subprocess.run("sudo cp -f {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename), shell=True, check=True)
                                 subprocess.run("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename), shell=True, check=True)
                             else:
-                                print("Cert {} was skipped.".format(filename))
+                                logging.info("Cert {} was skipped.".format(filename))
                         else:
-                            print("Cert {} was not changed. Skipped.".format(filename))
+                            logging.info("Cert {} was not changed. Skipped.".format(filename))
                     else:
-                        print("Cert {} does not exist. Creating.".format(filename))
+                        logging.info("Cert {} does not exist. Creating.".format(filename))
                         subprocess.run("sudo cp {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename), shell=True, check=True)
                         subprocess.run("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename), shell=True, check=True)
     except:
-        print("Restore certs failed.")
+        logging.error("Restore certs failed.")
 
 
 def restore_jars():
     try:
         if not os.path.isdir("{0}jars".format(temp_folder)):
-            print("Jar files are not available in this backup.")
+            logging.info("Jar files are not available in this backup.")
             raise Exception
 
         jars = list()
@@ -224,12 +225,12 @@ def restore_jars():
             jars = [dirs for root, dirs, files in os.walk("{}jars".format(temp_folder))][0]
         else:
             jars = args.jars.split(",")
-        print("Restore jars: {}".format(jars))
+        logging.info("Restore jars: {}".format(jars))
 
         if args.jars != "skip":
             for service in jars:
                 if not os.path.isdir("{0}jars/{1}".format(temp_folder, service)):
-                    print("Jar {} are not available in this backup.".format(service))
+                    logging.info("Jar {} are not available in this backup.".format(service))
                 else:
                     for root, dirs, files in os.walk("{0}jars/{1}".format(temp_folder, service)):
                         for filename in files:
@@ -241,36 +242,36 @@ def restore_jars():
                                     if ask("Jar {} was changed, rewrite it?".format(filename)):
                                         subprocess.run("cp -fP {0} {1}".format(backupfile, destfile), shell=True, check=True)
                                     else:
-                                        print("Jar {} was skipped.".format(destfile))
+                                        logging.info("Jar {} was skipped.".format(destfile))
                                 else:
-                                    print("Jar {} was not changed. Skipped.".format(filename))
+                                    logging.info("Jar {} was not changed. Skipped.".format(filename))
                             else:
-                                print("Jar {} does not exist. Creating.".format(filename))
+                                logging.info("Jar {} does not exist. Creating.".format(filename))
                                 subprocess.run("cp -P {0}jars/{1}/{2} {3}{4}{1}".format(temp_folder, service, filename,
                                                                                args.datalab_path, jars_folder), shell=True, check=True)
     except:
-        print("Restore jars failed.")
+        logging.error("Restore jars failed.")
 
 
 def restore_database():
     try:
-        print("Restore database: {}".format(args.db))
+        logging.info("Restore database: {}".format(args.db))
         if args.db:
             if not os.path.isfile("{0}{1}".format(temp_folder, "mongo.db")):
-                print("File {} are not available in this backup.".format("mongo.db"))
+                logging.info("File {} are not available in this backup.".format("mongo.db"))
                 raise Exception
             else:
                 if ask("Do you want to drop existing database and restore another from backup?"):
                     ssn_conf = open(args.datalab_path + conf_folder + 'ssn.yml').read()
                     data = yaml.load("mongo" + ssn_conf.split("mongo")[-1])
-                    print("Restoring database from backup")
+                    logging.info("Restoring database from backup")
                     subprocess.run("mongorestore --drop --host {0} --port {1} --archive={2}/mongo.db --username {3} --password '{4}' --authenticationDatabase={5}" \
                             .format(data['mongo']['host'], data['mongo']['port'], temp_folder,
                                     data['mongo']['username'], data['mongo']['password'], data['mongo']['database']), shell=True, check=True)
         else:
-            print("Restore database was skipped.")
+            logging.info("Restore database was skipped.")
     except:
-        print("Restore database failed.")
+        logging.error("Restore database failed.")
 
 
 def restore_finalize():
@@ -278,13 +279,13 @@ def restore_finalize():
         if ask("Start all services after restoring?"):
             subprocess.run("sudo supervisorctl start all", shell=True, check=True)
     except:
-        print("Failed to start all services.")
+        logging.error("Failed to start all services.")
 
     try:
         if ask("Clean temporary folder {}?".format(temp_folder)) and temp_folder != "/":
             subprocess.run("rm -rf {}".format(temp_folder), shell=True, check=True)
     except Exception as err:
-        print("Clear temp folder failed. {}".format(str(err)))
+        logging.error("Clear temp folder failed. {}".format(str(err)))
 
 
 if __name__ == "__main__":
@@ -308,4 +309,4 @@ if __name__ == "__main__":
     # Starting services & cleaning tmp folder
     restore_finalize()
 
-    print("Restore is finished. Good luck.")
\ No newline at end of file
+    logging.info("Restore is finished. Good luck.")
\ No newline at end of file
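
restore.py is interactive, so some of the converted messages are prompts and hints rather than diagnostics, and they are now only visible wherever the shared logger's handlers point. A simplified sketch of the confirmation helper with that in mind (the real ask() implementation is only partially shown above):

    from datalab.logger import logging  # assumed stdlib-compatible

    def ask(question):
        # Keep asking until the operator answers yes or no; the retry hint goes
        # through the logger, matching the converted script above.
        while True:
            answer = input('{} [y/n]: '.format(question)).strip().lower()
            if answer in ('y', 'yes'):
                return True
            if answer in ('n', 'no'):
                return False
            logging.error("Incorrect answer. Try again...")
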
diff --git a/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py b/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
index 34708c3..b61756f 100644
--- a/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
+++ b/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import argparse
-import logging
+from datalab.logger import logging
 import sys
 from datalab.ssn_lib import *
 from datalab.fab import *
@@ -36,7 +36,7 @@ args = parser.parse_args()
 
 
 def upload_response_file(instance_name, local_log_filepath, os_user):
-    print('Connect to SSN instance with hostname: {0} and name: {1}'.format(args.instance_hostname, instance_name))
+    logging.info('Connect to SSN instance with hostname: {0} and name: {1}'.format(args.instance_hostname, instance_name))
     pkey = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
     global conn
     conn = datalab.fab.init_datalab_connection(args.instance_hostname, os_user, pkey)
@@ -49,12 +49,12 @@ def upload_response_file(instance_name, local_log_filepath, os_user):
         conn.close()
         return True
     except:
-        print('Failed to upload response file')
+        logging.error('Failed to upload response file')
         return False
 
 
 if __name__ == "__main__":
-    print("Uploading response file")
+    logging.info("Uploading response file")
     if not upload_response_file(args.instance_name, args.local_log_filepath, args.os_user):
         logging.error('Failed to upload response file')
         sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py b/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
index 5122ab4..f2a7db2 100644
--- a/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
+++ b/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -50,35 +51,35 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    #print("Mount additional volume")
+    #logging.info("Mount additional volume")
     #prepare_disk(args.os_user)
 
     # INSTALL DOCKER COMPOSE
-    print("Installing docker compose")
+    logging.info("Installing docker compose")
     if not ensure_docker_compose(args.os_user):
         sys.exit(1)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install ungit")
+    logging.info("Install ungit")
     install_ungit(args.os_user, args.superset_name, args.edge_instance_private_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
         # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     # PREPARE SUPERSET
diff --git a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
index e1b7cf4..4772035 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
@@ -28,6 +28,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -75,68 +76,68 @@ r_libs = ['R6', 'pbdZMQ={}'.format(os.environ['notebook_pbdzmq_version']), 'RCur
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
-    print("Install R")
+    logging.info("Install R")
     ensure_r(args.os_user, r_libs)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
 
     # INSTALL TENSORFLOW AND OTHER DEEP LEARNING LIBRARIES
-    print("Install TensorFlow")
+    logging.info("Install TensorFlow")
     install_tensor(args.os_user, cuda_version, cuda_file_name,
                    cudnn_version, cudnn_file_name, tensorflow_version,
                    templates_dir, nvidia_version)
-    print("Install Theano")
+    logging.info("Install Theano")
     install_theano(args.os_user, theano_version)
-    print("Installing Keras")
+    logging.info("Installing Keras")
     install_keras(args.os_user, keras_version)
 
     # INSTALL RSTUDIO
-    print("Install RStudio")
+    logging.info("Install RStudio")
     install_rstudio(args.os_user, local_spark_path, args.rstudio_pass, args.rstudio_version, python_venv_version)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
+    logging.info("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path )
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install Ungit")
+    logging.info("Install Ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     # POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
 
     conn.close()
diff --git a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
index c9b5e3f..6808936 100644
--- a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
+++ b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
@@ -28,6 +28,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -75,78 +76,78 @@ cudnn_file_name = os.environ['notebook_cudnn_file_name']
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
 
     # INSTALL TENSORFLOW AND OTHER DEEP LEARNING LIBRARIES
-    print("Install TensorFlow")
+    logging.info("Install TensorFlow")
     install_tensor(args.os_user, cuda_version, cuda_file_name,
                    cudnn_version, cudnn_file_name, tensorflow_version,
                    templates_dir, nvidia_version)
-    print("Install Theano")
+    logging.info("Install Theano")
     install_theano(args.os_user, theano_version)
-    print("Installing Keras")
+    logging.info("Installing Keras")
     install_keras(args.os_user, keras_version)
 
     # INSTALL JUPYTER NOTEBOOK
-    print("Install Jupyter")
+    logging.info("Install Jupyter")
     configure_jupyter(args.os_user, jupyter_conf_file, templates_dir, jupyter_version, args.exploratory_name)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
+    logging.info("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path )
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir)
 
     # INSTALL JUPYTER KERNELS
-    #print("Install pyspark local kernel for Jupyter")
+    #logging.info("Install pyspark local kernel for Jupyter")
     #ensure_pyspark_local_kernel(args.os_user, pyspark_local_path_dir, templates_dir, spark_version)
-    print("Install py3spark local kernel for Jupyter")
+    logging.info("Install py3spark local kernel for Jupyter")
     ensure_py3spark_local_kernel(args.os_user, py3spark_local_path_dir, templates_dir, spark_version, python_venv_path, python_venv_version)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install Ungit")
+    logging.info("Install Ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     # INSTALL OPTIONAL PACKAGES
-    print("Installing additional Python packages")
+    logging.info("Installing additional Python packages")
     ensure_additional_python_libs(args.os_user)
-    print("Install Matplotlib")
+    logging.info("Install Matplotlib")
     ensure_matplot(args.os_user)
     
     #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
 
     conn.close()
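
The notebook configure scripts keep str.format() inside the converted calls, which builds the message eagerly; logging's own %-style arguments defer the work until a handler actually emits the record. Both produce the same output at INFO level; a quick comparison, assuming the shared logger is stdlib-compatible and with an illustrative version string:

    from datalab.logger import logging  # assumed stdlib-compatible

    tensorflow_version = '2.4.1'  # illustrative value
    # Equivalent output when INFO is enabled; the second form skips string
    # building entirely if the level is filtered out.
    logging.info("Install TensorFlow {}".format(tensorflow_version))
    logging.info("Install TensorFlow %s", tensorflow_version)
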
diff --git a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
index 896bc08..5709cf5 100644
--- a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
+++ b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
@@ -28,6 +28,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -105,7 +106,7 @@ def configure_zeppelin(os_user):
             conn.sudo('cp /opt/zeppelin-' + zeppelin_version + '-bin-netinst/interpreter/md/zeppelin-markdown-*.jar /opt/zeppelin/lib/interpreter/') # necessary when executing paragraph launches java process with "-cp :/opt/zeppelin/lib/interpreter/*:"
             conn.sudo('cp /opt/zeppelin-' + zeppelin_version + '-bin-netinst/interpreter/sh/zeppelin-shell-*.jar /opt/zeppelin/lib/interpreter/')
         except Exception as err:
-            print('Error:', str(err))
+            logging.error('Error:', str(err))
             sys.exit(1)
         try:
             conn.put(templates_dir + 'zeppelin-notebook.service', '/tmp/zeppelin-notebook.service')
@@ -126,7 +127,7 @@ def configure_zeppelin(os_user):
             conn.sudo('''bash -l -c 'echo \"d /var/run/zeppelin 0755 {}\" > /usr/lib/tmpfiles.d/zeppelin.conf' '''.format(os_user))
             conn.sudo('touch /home/' + os_user + '/.ensure_dir/zeppelin_ensured')
         except Exception as err:
-            print('Error:', str(err))
+            logging.error('Error:', str(err))
             sys.exit(1)
 
 
@@ -203,64 +204,64 @@ def install_local_livy(args):
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
     deeper_config = json.loads(args.additional_config)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
-    print("Installing Scala")
+    logging.info("Installing Scala")
     ensure_scala(scala_link, args.scala_version, args.os_user)
     if os.environ['notebook_r_enabled'] == 'true':
-        print("Installing R")
+        logging.info("Installing R")
         ensure_r(args.os_user, r_libs)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
-    #print("Install Python 3 specific version")
+    #logging.info("Install Python 3 specific version")
     #ensure_python3_specific_version(python3_version, args.os_user)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
+    logging.info("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, args.spark_version, args.hadoop_version, local_spark_path)
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir)
 
     # INSTALL ZEPPELIN
-    print("Install Zeppelin")
+    logging.info("Install Zeppelin")
     configure_zeppelin(args.os_user)
 
     # INSTALL ZEPPELIN KERNELS
     if args.multiple_clusters == 'true':
-        print("Installing Livy for local kernels")
+        logging.info("Installing Livy for local kernels")
         install_local_livy(args)
-        print("Configuring local kernels")
+        logging.info("Configuring local kernels")
         configure_local_livy_kernels(args)
     else:
-        print("Configuring local kernels")
+        logging.info("Configuring local kernels")
         configure_local_spark_kernels(args, python_venv_path)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install Ungit")
+    logging.info("Install Ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
@@ -268,20 +269,20 @@ if __name__ == "__main__":
     conn.sudo('cp /home/{}/.git/templates/hooks/pre-commit /opt/zeppelin/notebook/.git/hooks/'.format(args.os_user))
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     # INSTALL OPTIONAL PACKAGES
     if os.environ['notebook_r_enabled'] == 'true':
-        print("Install additional R packages")
+        logging.info("Install additional R packages")
         install_r_packages(args.os_user)
-    print("Install additional Python packages")
+    logging.info("Install additional Python packages")
     ensure_additional_python_libs(args.os_user)
-    print("Install Matplotlib.")
+    logging.info("Install Matplotlib.")
     ensure_matplot(args.os_user)
     
     #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
 
     conn.close()



[incubator-datalab] 03/04: [DATALAB-2409]: replaced print with logging in all general/scripts/gcp .py scripts

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit d9593a5667945fadbbbc6727b01cb0e789ca4624
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Oct 11 17:51:48 2021 +0300

    [DATALAB-2409]: replaced print with logging in all general/scripts/gcp .py scripts
---
 .../src/general/scripts/gcp/common_collect_data.py |  5 ++-
 .../general/scripts/gcp/common_create_bucket.py    |  5 ++-
 .../general/scripts/gcp/common_create_firewall.py  |  9 ++--
 .../general/scripts/gcp/common_create_instance.py  |  5 ++-
 .../general/scripts/gcp/common_create_nat_route.py |  5 ++-
 .../scripts/gcp/common_create_notebook_image.py    |  9 ++--
 .../scripts/gcp/common_create_service_account.py   | 15 ++++---
 .../general/scripts/gcp/common_create_subnet.py    |  9 ++--
 .../scripts/gcp/common_download_git_certfile.py    |  5 ++-
 .../src/general/scripts/gcp/common_install_gpu.py  |  5 ++-
 ...common_notebook_configure_dataengine-service.py | 15 ++-----
 .../gcp/common_notebook_configure_dataengine.py    | 15 ++-----
 .../general/scripts/gcp/common_prepare_notebook.py | 22 +++------
 .../src/general/scripts/gcp/common_reupload_key.py |  3 +-
 .../general/scripts/gcp/common_start_notebook.py   | 22 +++------
 .../general/scripts/gcp/common_stop_notebook.py    | 32 +++++--------
 .../scripts/gcp/common_terminate_notebook.py       | 29 +++++-------
 .../scripts/gcp/dataengine-service_configure.py    | 44 +++++++-----------
 .../scripts/gcp/dataengine-service_create.py       |  5 ++-
 .../scripts/gcp/dataengine-service_install_libs.py | 14 ++----
 .../scripts/gcp/dataengine-service_list_libs.py    | 13 ++----
 .../scripts/gcp/dataengine-service_prepare.py      | 15 ++-----
 .../scripts/gcp/dataengine-service_terminate.py    | 22 +++------
 .../general/scripts/gcp/dataengine_configure.py    | 41 +++++------------
 .../src/general/scripts/gcp/dataengine_prepare.py  | 21 +++------
 .../src/general/scripts/gcp/dataengine_start.py    | 16 ++-----
 .../src/general/scripts/gcp/dataengine_stop.py     | 15 ++-----
 .../general/scripts/gcp/dataengine_terminate.py    | 17 +++----
 .../general/scripts/gcp/deeplearning_configure.py  | 46 +++++++------------
 .../src/general/scripts/gcp/edge_configure.py      | 41 ++++++-----------
 .../general/scripts/gcp/edge_create_static_ip.py   | 13 ++----
 .../src/general/scripts/gcp/edge_start.py          | 23 +++-------
 .../src/general/scripts/gcp/edge_status.py         | 15 ++-----
 .../src/general/scripts/gcp/edge_stop.py           | 14 ++----
 .../src/general/scripts/gcp/edge_terminate.py      | 32 +++++--------
 .../src/general/scripts/gcp/jupyter_configure.py   | 48 +++++++-------------
 .../general/scripts/gcp/jupyterlab_configure.py    | 48 +++++++-------------
 .../src/general/scripts/gcp/project_prepare.py     | 36 +++++----------
 .../src/general/scripts/gcp/project_terminate.py   | 43 +++++++-----------
 .../src/general/scripts/gcp/rstudio_configure.py   | 48 +++++++-------------
 .../rstudio_dataengine-service_create_configs.py   |  5 ++-
 .../src/general/scripts/gcp/ssn_configure.py       | 52 ++++++++--------------
 .../general/scripts/gcp/ssn_create_static_ip.py    | 12 ++---
 .../src/general/scripts/gcp/ssn_create_vpc.py      |  7 +--
 .../src/general/scripts/gcp/ssn_finalize.py        |  3 +-
 .../src/general/scripts/gcp/ssn_prepare.py         | 22 +++------
 .../src/general/scripts/gcp/ssn_terminate.py       | 15 ++-----
 .../scripts/gcp/ssn_terminate_gcp_resources.py     | 45 ++++++++++---------
 .../src/general/scripts/gcp/superset_configure.py  | 52 ++++++++--------------
 .../scripts/gcp/tensor-rstudio_configure.py        | 50 ++++++++-------------
 .../src/general/scripts/gcp/tensor_configure.py    | 48 +++++++-------------
 .../src/general/scripts/gcp/zeppelin_configure.py  | 42 ++++++-----------
 52 files changed, 420 insertions(+), 773 deletions(-)

diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_collect_data.py b/infrastructure-provisioning/src/general/scripts/gcp/common_collect_data.py
index 02a3dcb..1f8527e 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_collect_data.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_collect_data.py
@@ -29,6 +29,7 @@ import traceback
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -54,13 +55,13 @@ if __name__ == "__main__":
             data_instances = GCPMeta().get_list_instance_statuses(id_hosts)
             statuses['host'] = data_instances
         except:
-            print("Hosts JSON wasn't been provided")
+            logging.error("Hosts JSON wasn't been provided")
         try:
             id_clusters = get_id_resourses(data.get('cluster'))
             data_clusters = GCPMeta().get_list_cluster_statuses(id_clusters, full_check=False)
             statuses['cluster'] = data_clusters
         except:
-            print("Clusters JSON wasn't been provided")
+            logging.error("Clusters JSON wasn't been provided")
         with open('/root/result.json', 'w') as outfile:
             json.dump(statuses, outfile)
     except Exception as err:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_bucket.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_bucket.py
index 061746a..34352e5 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_bucket.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_bucket.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--bucket_name', type=str, default='')
@@ -35,9 +36,9 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.bucket_name:
         if GCPMeta().get_bucket(args.bucket_name):
-            print("REQUESTED BUCKET {} ALREADY EXISTS".format(args.bucket_name))
+            logging.info("REQUESTED BUCKET {} ALREADY EXISTS".format(args.bucket_name))
         else:
-            print("Creating Bucket {}".format(args.bucket_name))
+            logging.info("Creating Bucket {}".format(args.bucket_name))
             GCPActions().create_bucket(args.bucket_name)
             GCPActions().add_bucket_labels(args.bucket_name, json.loads(args.tags))
     else:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_firewall.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_firewall.py
index aa126c5..2ef8b7b 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_firewall.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_firewall.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--firewall', type=str)
@@ -36,15 +37,15 @@ if __name__ == "__main__":
     if firewall:
         for firewall_rule in firewall['ingress']:
             if GCPMeta().get_firewall(firewall_rule['name']):
-                print("REQUESTED INGRESS FIREWALL {} ALREADY EXISTS".format(firewall_rule['name']))
+                logging.info("REQUESTED INGRESS FIREWALL {} ALREADY EXISTS".format(firewall_rule['name']))
             else:
-                print("Creating Ingress Firewall {}".format(firewall_rule['name']))
+                logging.info("Creating Ingress Firewall {}".format(firewall_rule['name']))
                 GCPActions().create_firewall(firewall_rule)
         for firewall_rule in firewall['egress']:
             if GCPMeta().get_firewall(firewall_rule['name']):
-                print("REQUESTED EGRESS FIREWALL {} ALREADY EXISTS".format(firewall_rule['name']))
+                logging.info("REQUESTED EGRESS FIREWALL {} ALREADY EXISTS".format(firewall_rule['name']))
             else:
-                print("Creating Egress Firewall {}".format(firewall_rule['name']))
+                logging.info("Creating Egress Firewall {}".format(firewall_rule['name']))
                 GCPActions().create_firewall(firewall_rule)
     else:
         parser.print_help()
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py
index b62f882..d780b44 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--instance_name', type=str, default='')
@@ -55,9 +56,9 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.instance_name:
         if GCPMeta().get_instance(args.instance_name):
-            print("REQUESTED INSTANCE {} ALREADY EXISTS".format(args.instance_name))
+            logging.info("REQUESTED INSTANCE {} ALREADY EXISTS".format(args.instance_name))
         else:
-            print("Creating Instance {}".format(args.instance_name))
+            logging.info("Creating Instance {}".format(args.instance_name))
             GCPActions().create_instance(args.instance_name, args.service_base_name, args.cluster_name, args.region, args.zone,
                                          args.vpc_name, args.subnet_name,
                                          args.instance_size, args.ssh_key_path, args.initial_user, args.image_name,
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_nat_route.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_nat_route.py
index d9a5f0b..f1d49fb 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_nat_route.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_nat_route.py
@@ -25,6 +25,7 @@ import argparse
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--nat_route_name', type=str)
@@ -35,9 +36,9 @@ args = parser.parse_args()
 
 if __name__ == "__main__":
     if GCPMeta().get_route(args.nat_route_name):
-        print("REQUESTED ROUTE {} ALREADY EXISTS".format(args.nat_route_name))
+        logging.info("REQUESTED ROUTE {} ALREADY EXISTS".format(args.nat_route_name))
     else:
-        print("Creating NAT ROUTE {}".format(args.nat_route_name))
+        logging.info("Creating NAT ROUTE {}".format(args.nat_route_name))
         params = {
             "destRange": "0.0.0.0/0",
             "name": args.nat_route_name,
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_notebook_image.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_notebook_image.py
index f7ba4fd..1be0d2e 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_notebook_image.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_notebook_image.py
@@ -24,6 +24,7 @@
 import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
+from datalab.logger import logging
 import json
 import os
 import sys
@@ -63,19 +64,19 @@ if __name__ == "__main__":
                                                                        image_conf['endpoint_name'],
                                                                        image_conf['exploratory_name'])
         image_conf['zone'] = os.environ['gcp_zone']
-        print('[CREATING IMAGE]')
+        logging.info('[CREATING IMAGE]')
         primary_image_id = GCPMeta.get_image_by_name(image_conf['expected_primary_image_name'])
         if primary_image_id == '':
             image_id_list = GCPActions.create_image_from_instance_disks(
                 image_conf['expected_primary_image_name'], image_conf['expected_secondary_image_name'],
                 image_conf['instance_name'], image_conf['zone'], image_conf['image_labels'])
             if image_id_list and image_id_list[0] != '':
-                print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
             else:
-                print("Looks like another image creating operation for your template have been started a "
+                logging.info("Looks like another image creating operation for your template have been started a "
                       "moment ago.")
             if image_id_list and image_id_list[1] != '':
-                print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
 
             with open("/root/result.json", 'w') as result:
                 res = {"primary_image_name": image_conf['expected_primary_image_name'],
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_service_account.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_service_account.py
index d1e9a55..44f33ee 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_service_account.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_service_account.py
@@ -25,6 +25,7 @@ import argparse
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--service_account_name', type=str, default='')
@@ -39,16 +40,16 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.service_account_name != '':
         if GCPMeta().get_service_account(args.service_account_name, args.service_base_name):
-            print("REQUESTED SERVICE ACCOUNT {} ALREADY EXISTS".format(args.service_account_name))
+            logging.info("REQUESTED SERVICE ACCOUNT {} ALREADY EXISTS".format(args.service_account_name))
         else:
-            print("Creating Service account {}".format(args.service_account_name))
+            logging.info("Creating Service account {}".format(args.service_account_name))
             GCPActions().create_service_account(args.service_account_name, args.service_base_name, args.unique_index)
             if GCPMeta().get_role(args.role_name):
                 if GCPMeta().get_role_status(args.role_name) == True:
-                    print('Restoring deleted role')
+                    logging.info('Restoring deleted role')
                     GCPActions().undelete_role(args.role_name)
                 else:
-                    print("REQUESTED ROLE {} ALREADY EXISTS".format(args.role_name))
+                    logging.info("REQUESTED ROLE {} ALREADY EXISTS".format(args.role_name))
             else:
                 if args.policy_path == '':
                     permissions = []
@@ -56,12 +57,12 @@ if __name__ == "__main__":
                     with open(args.policy_path, 'r') as f:
                         json_file = f.read()
                     permissions = json.loads(json_file)
-                print("Creating Role {}".format(args.role_name))
+                logging.info("Creating Role {}".format(args.role_name))
                 GCPActions().create_role(args.role_name, permissions)
-            print("Assigning custom role to Service account.")
+            logging.info("Assigning custom role to Service account.")
             GCPActions().set_role_to_service_account(args.service_account_name, args.role_name, args.service_base_name)
             if args.roles_path != '':
-                print("Assigning predefined roles to Service account.")
+                logging.info("Assigning predefined roles to Service account.")
                 with open(args.roles_path, 'r') as f:
                     json_file = f.read()
                 predefined_roles = json.loads(json_file)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_subnet.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_subnet.py
index 194e108..1153aad 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_subnet.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_subnet.py
@@ -26,6 +26,7 @@ import ipaddress
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--subnet_name', type=str, default='')
@@ -105,17 +106,17 @@ if __name__ == "__main__":
             existed_subnet_list.append(GCPMeta().get_subnet(subnet.split('/')[-1], args.region)['ipCidrRange'])
         available_subnets = list(set(pre_defined_subnet_list) - set(existed_subnet_list))
         if not available_subnets:
-            print("There is no available subnet to create. Aborting...")
+            logging.info("There is no available subnet to create. Aborting...")
             sys.exit(1)
         else:
             datalab_subnet_cidr = available_subnets[0]
 
     if args.subnet_name != '':
         if GCPMeta().get_subnet(args.subnet_name, args.region):
-            print("REQUESTED SUBNET {} ALREADY EXISTS".format(args.subnet_name))
+            logging.info("REQUESTED SUBNET {} ALREADY EXISTS".format(args.subnet_name))
         else:
-            print("Creating Subnet {}".format(args.subnet_name))
+            logging.info("Creating Subnet {}".format(args.subnet_name))
             GCPActions().create_subnet(args.subnet_name, datalab_subnet_cidr, args.vpc_selflink, args.region)
     else:
-        print("Subnet name can't be empty")
+        logging.info("Subnet name can't be empty")
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
index c2a3644..7256d4e 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
@@ -25,6 +25,7 @@ import argparse
 import os
 from datalab.actions_lib import *
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--keyfile', type=str, default='')
@@ -42,8 +43,8 @@ if __name__ == "__main__":
     if GCPActions().get_gitlab_cert(bucket_name, gitlab_certfile):
         conn.put(gitlab_certfile, gitlab_certfile)
         conn.sudo('chown root:root {}'.format(gitlab_certfile))
-        print('{} has been downloaded'.format(gitlab_certfile))
+        logging.info('{} has been downloaded'.format(gitlab_certfile))
     else:
-        print('There is no {} to download'.format(gitlab_certfile))
+        logging.info('There is no {} to download'.format(gitlab_certfile))
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_install_gpu.py b/infrastructure-provisioning/src/general/scripts/gcp/common_install_gpu.py
index 733236d..4f85b9b 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_install_gpu.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_install_gpu.py
@@ -26,6 +26,7 @@ import os
 import sys
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -37,11 +38,11 @@ args = parser.parse_args()
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
-    print('Installing GPU drivers')
+    logging.info('Installing GPU drivers')
     install_nvidia_drivers(args.os_user)
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
index 7273709..3305eb5 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -41,17 +41,10 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -97,7 +90,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         params = "--bucket {} --cluster_name {} --dataproc_version {} --keyfile {} --notebook_ip {} --region {} " \
                  "--edge_user_name {} --project_name {} --os_user {}  --edge_hostname {} --proxy_port {} " \
                  "--scala_version {} --application {}" \
@@ -118,7 +110,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
-        print('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
         params = "--hostname {0} " \
                  "--keyfile {1} " \
                  "--os_user {2} " \
@@ -140,7 +131,7 @@ if __name__ == "__main__":
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Tag_name": notebook_config['tag_name'],
                    "Action": "Configure notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
index c6e2d7a..6f79458 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -41,18 +41,11 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
         # generating variables dictionary
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         notebook_config = dict()
         if 'exploratory_name' in os.environ:
             notebook_config['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -93,7 +86,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         params = "--cluster_name {0} --spark_version {1} --hadoop_version {2} --os_user {3} --spark_master {4}" \
                  " --keyfile {5} --notebook_ip {6} --spark_master_ip {7}".\
             format(notebook_config['cluster_name'], os.environ['notebook_spark_version'],
@@ -112,7 +104,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
-        print('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
         params = "--hostname {0} " \
                  "--keyfile {1} " \
                  "--os_user {2} " \
@@ -135,7 +126,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Configure notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
index 96d1a3b..dbacfab 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,16 +34,10 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         notebook_config = dict()
         notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
         notebook_config['edge_user_name'] = (os.environ['edge_user_name'])
@@ -59,7 +53,6 @@ if __name__ == "__main__":
                                                                             notebook_config['endpoint_tag']))
         if edge_status != 'RUNNING':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             ssn_hostname = GCPMeta.get_private_ip_address(notebook_config['service_base_name'] + '-ssn')
             datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
                                             os.environ['conf_os_user'],
@@ -118,7 +111,7 @@ if __name__ == "__main__":
             notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_name'],
             os.environ['application'], os.environ['notebook_image_name'].replace('_', '-').lower()) if (x != 'None' and x != '')
             else notebook_config['expected_primary_image_name'])(str(os.environ.get('notebook_image_name')))
-        print('Searching pre-configured images')
+        logging.info('Searching pre-configured images')
 
         deeplearning_ami = 'false'
 
@@ -131,7 +124,7 @@ if __name__ == "__main__":
         if notebook_config['primary_image_name'] == '':
             notebook_config['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
         else:
-            print('Pre-configured primary image found. Using: {}'.format(
+            logging.info('Pre-configured primary image found. Using: {}'.format(
                 notebook_config['primary_image_name'].get('name')))
             if deeplearning_ami == 'true':
                 notebook_config['primary_image_name'] = 'projects/deeplearning-platform-release/global/images/{}'.format(
@@ -150,7 +143,7 @@ if __name__ == "__main__":
         if notebook_config['secondary_image_name'] == '':
             notebook_config['secondary_image_name'] = 'None'
         else:
-            print('Pre-configured secondary image found. Using: {}'.format(
+            logging.info('Pre-configured secondary image found. Using: {}'.format(
                 notebook_config['secondary_image_name'].get('name')))
             notebook_config['secondary_image_name'] = 'global/images/{}'.format(
                 notebook_config['secondary_image_name'].get('name'))
@@ -173,11 +166,11 @@ if __name__ == "__main__":
             data = {"notebook_name": notebook_config['instance_name'], "error": ""}
             json.dump(data, f)
 
-        print('Additional tags will be added: {}'.format(os.environ['tags']))
+        logging.info('Additional tags will be added: {}'.format(os.environ['tags']))
         additional_tags = os.environ['tags'].replace("': '", ":").replace("', '", ",").replace("{'", "" ).replace(
             "'}", "").lower()
 
-        print('Additional tags will be added: {}'.format(additional_tags))
+        logging.info('Additional tags will be added: {}'.format(additional_tags))
         notebook_config['labels'] = {"name": notebook_config['instance_name'],
                                      "sbn": notebook_config['service_base_name'],
                                      "product": "datalab"
@@ -196,7 +189,6 @@ if __name__ == "__main__":
     # launching instance for notebook server
     try:
         logging.info('[CREATE NOTEBOOK INSTANCE]')
-        print('[CREATE NOTEBOOK INSTANCE]')
         params = "--instance_name {0} --region {1} --zone {2} --vpc_name {3} --subnet_name {4} --instance_size {5} " \
                  "--ssh_key_path {6} --initial_user {7} --service_account_name {8} --image_name {9} " \
                  "--secondary_image_name {10} --instance_class {11} --primary_disk_size {12} " \
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py b/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py
index 0119977..e50061f 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py
@@ -27,6 +27,7 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -48,5 +49,5 @@ if __name__ == "__main__":
         try:
             subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
index fe45998..f63d6fb 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,16 +33,10 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -50,9 +44,8 @@ if __name__ == "__main__":
 
     try:
         logging.info('[START NOTEBOOK]')
-        print('[START NOTEBOOK]')
         try:
-            print("Starting notebook")
+            logging.info("Starting notebook")
             GCPActions.start_instance(notebook_config['notebook_name'], notebook_config['zone'])
         except Exception as err:
             traceback.print_exc()
@@ -63,7 +56,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SETUP USER GIT CREDENTIALS]')
-        print('[SETUP USER GIT CREDENTIALS]')
         notebook_config['notebook_ip'] = GCPMeta.get_private_ip_address(notebook_config['notebook_name'])
         notebook_config['keyfile'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
@@ -79,7 +71,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATE LAST ACTIVITY TIME]')
-        print('[UPDATE LAST ACTIVITY TIME]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
@@ -92,16 +83,15 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['notebook_name']))
-        print("Private IP: {}".format(notebook_config['notebook_ip']))
+        logging.info("Instance name: {}".format(notebook_config['notebook_name']))
+        logging.info("Private IP: {}".format(notebook_config['notebook_ip']))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": notebook_config['notebook_ip'],
                    "ip": notebook_config['notebook_ip'],
                    "notebook_name": notebook_config['notebook_name'],
                    "Action": "Start up notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_stop_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_stop_notebook.py
index 094de05..abde92e 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_stop_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_stop_notebook.py
@@ -25,13 +25,13 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 
 def stop_notebook(instance_name, bucket_name, region, zone, ssh_user, key_path, project_name):
-    print('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
+    logging.info('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
     try:
         labels = [
             {instance_name: '*'}
@@ -43,35 +43,35 @@ def stop_notebook(instance_name, bucket_name, region, zone, ssh_user, key_path,
                     'computational_name')
                 cluster = GCPMeta.get_list_cluster_statuses([cluster_name])
                 GCPActions.bucket_cleanup(bucket_name, project_name, cluster_name)
-                print('The bucket {} has been cleaned successfully'.format(bucket_name))
+                logging.info('The bucket {} has been cleaned successfully'.format(bucket_name))
                 GCPActions.delete_dataproc_cluster(cluster_name, region)
-                print('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
+                logging.info('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
                 GCPActions.remove_kernels(instance_name, cluster_name, cluster[0]['version'], ssh_user,
                                           key_path, computational_name)
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataproc", str(err))
         sys.exit(1)
 
-    print("Stopping data engine cluster")
+    logging.info("Stopping data engine cluster")
     try:
         clusters_list = GCPMeta.get_list_instances_by_label(zone, instance_name)
         if clusters_list.get('items'):
             for vm in clusters_list['items']:
                 try:
                     GCPActions.stop_instance(vm['name'], zone)
-                    print("Instance {} has been stopped".format(vm['name']))
+                    logging.info("Instance {} has been stopped".format(vm['name']))
                 except:
                     pass
         else:
-            print("There are no data engine clusters to terminate.")
+            logging.info("There are no data engine clusters to terminate.")
 
     except Exception as err:
         datalab.fab.append_result("Failed to stop dataengine cluster", str(err))
         sys.exit(1)
 
-    print("Stopping notebook")
+    logging.info("Stopping notebook")
     try:
         GCPActions.stop_instance(instance_name, zone)
     except Exception as err:
@@ -80,17 +80,10 @@ def stop_notebook(instance_name, bucket_name, region, zone, ssh_user, key_path,
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['edge_user_name'] = (os.environ['edge_user_name'])
@@ -105,14 +98,13 @@ if __name__ == "__main__":
     notebook_config['gcp_zone'] = os.environ['gcp_zone']
 
     logging.info('[STOP NOTEBOOK]')
-    print('[STOP NOTEBOOK]')
     try:
         stop_notebook(notebook_config['notebook_name'], notebook_config['bucket_name'],
                       notebook_config['gcp_region'], notebook_config['gcp_zone'],
                       os.environ['conf_os_user'], notebook_config['key_path'],
                       notebook_config['project_name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.info('Error: {0}'.format(err))
         datalab.fab.append_result("Failed to stop notebook.", str(err))
         sys.exit(1)
 
@@ -120,7 +112,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Stop notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_terminate_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_terminate_notebook.py
index 100d49d..db40b05 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_terminate_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_terminate_notebook.py
@@ -25,14 +25,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_nb(instance_name, bucket_name, region, zone, user_name):
-    print('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
+    logging.info('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
     try:
         labels = [
             {instance_name: '*'}
@@ -41,33 +41,33 @@ def terminate_nb(instance_name, bucket_name, region, zone, user_name):
         if clusters_list:
             for cluster_name in clusters_list:
                 GCPActions.bucket_cleanup(bucket_name, user_name, cluster_name)
-                print('The bucket {} has been cleaned successfully'.format(bucket_name))
+                logging.info('The bucket {} has been cleaned successfully'.format(bucket_name))
                 GCPActions.delete_dataproc_cluster(cluster_name, region)
-                print('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
+                logging.info('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataproc", str(err))
         sys.exit(1)
 
-    print("Terminating data engine cluster")
+    logging.info("Terminating data engine cluster")
     try:
         clusters_list = GCPMeta.get_list_instances_by_label(zone, instance_name)
         if clusters_list.get('items'):
             for vm in clusters_list['items']:
                 try:
                     GCPActions.remove_instance(vm['name'], zone)
-                    print("Instance {} has been terminated".format(vm['name']))
+                    logging.info("Instance {} has been terminated".format(vm['name']))
                 except:
                     pass
         else:
-            print("There are no data engine clusters to terminate.")
+            logging.info("There are no data engine clusters to terminate.")
 
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataengine", str(err))
         sys.exit(1)
 
-    print("Terminating notebook")
+    logging.info("Terminating notebook")
     try:
         GCPActions.remove_instance(instance_name, zone)
     except Exception as err:
@@ -76,16 +76,10 @@ def terminate_nb(instance_name, bucket_name, region, zone, user_name):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['edge_user_name'] = (os.environ['edge_user_name'])
@@ -100,7 +94,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE NOTEBOOK]')
-        print('[TERMINATE NOTEBOOK]')
         try:
             terminate_nb(notebook_config['notebook_name'], notebook_config['bucket_name'],
                          notebook_config['gcp_region'], notebook_config['gcp_zone'],
@@ -116,7 +109,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Terminate notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
index 1773a61..7cc3c65 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.meta_lib
 import datalab.notebook_lib
 import json
-import logging
+from datalab.logger import logging
 import multiprocessing
 import os
 import sys
@@ -40,7 +40,6 @@ def configure_dataengine_service(instance, dataproc_conf):
     # configuring proxy on Data Engine service
     try:
         logging.info('[CONFIGURE PROXY ON DATAENGINE SERVICE]')
-        print('[CONFIGURE PROXY ON DATAENGINE SERVICE]')
         additional_config = {"proxy_host": dataproc_conf['edge_instance_name'], "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(dataproc_conf['instance_ip'], dataproc_conf['cluster_name'], dataproc_conf['key_path'],
@@ -57,7 +56,6 @@ def configure_dataengine_service(instance, dataproc_conf):
 
     try:
         logging.info('[CONFIGURE DATAENGINE SERVICE]')
-        print('[CONFIGURE DATAENGINE SERVICE]')
         try:
             global conn
             conn = datalab.fab.init_datalab_connection(dataproc_conf['instance_ip'], dataproc_conf['datalab_ssh_user'], dataproc_conf['key_path'])
@@ -77,7 +75,6 @@ def configure_dataengine_service(instance, dataproc_conf):
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         slaves = []
         for idx, instance in enumerate(dataproc_conf['cluster_core_instances']):
@@ -119,16 +116,10 @@ def configure_dataengine_service(instance, dataproc_conf):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         dataproc_conf = dict()
         if 'exploratory_name' in os.environ:
             dataproc_conf['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -216,21 +207,20 @@ if __name__ == "__main__":
         dataproc_master_access_url = "https://" + dataproc_conf['edge_instance_hostname'] + "/{}/".format(
             dataproc_conf['exploratory_name'] + '_' + dataproc_conf['computational_name'])
         logging.info('[SUMMARY]')
-        print('[SUMMARY]')
-        print("Service base name: {}".format(dataproc_conf['service_base_name']))
-        print("Cluster name: {}".format(dataproc_conf['cluster_name']))
-        print("Key name: {}".format(dataproc_conf['key_name']))
-        print("Region: {}".format(dataproc_conf['region']))
-        print("Zone: {}".format(dataproc_conf['zone']))
-        print("Subnet: {}".format(dataproc_conf['subnet']))
-        print("Dataproc version: {}".format(dataproc_conf['release_label']))
-        print("Dataproc master node shape: {}".format(os.environ['dataproc_master_instance_type']))
-        print("Dataproc slave node shape: {}".format(os.environ['dataproc_slave_instance_type']))
-        print("Master count: {}".format(os.environ['dataproc_master_count']))
-        print("Slave count: {}".format(os.environ['dataproc_slave_count']))
-        print("Preemptible count: {}".format(os.environ['dataproc_preemptible_count']))
-        print("Notebook hostname: {}".format(os.environ['notebook_instance_name']))
-        print("Bucket name: {}".format(dataproc_conf['bucket_name']))
+        logging.info("Service base name: {}".format(dataproc_conf['service_base_name']))
+        logging.info("Cluster name: {}".format(dataproc_conf['cluster_name']))
+        logging.info("Key name: {}".format(dataproc_conf['key_name']))
+        logging.info("Region: {}".format(dataproc_conf['region']))
+        logging.info("Zone: {}".format(dataproc_conf['zone']))
+        logging.info("Subnet: {}".format(dataproc_conf['subnet']))
+        logging.info("Dataproc version: {}".format(dataproc_conf['release_label']))
+        logging.info("Dataproc master node shape: {}".format(os.environ['dataproc_master_instance_type']))
+        logging.info("Dataproc slave node shape: {}".format(os.environ['dataproc_slave_instance_type']))
+        logging.info("Master count: {}".format(os.environ['dataproc_master_count']))
+        logging.info("Slave count: {}".format(os.environ['dataproc_slave_count']))
+        logging.info("Preemptible count: {}".format(os.environ['dataproc_preemptible_count']))
+        logging.info("Notebook hostname: {}".format(os.environ['notebook_instance_name']))
+        logging.info("Bucket name: {}".format(dataproc_conf['bucket_name']))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": dataproc_conf['cluster_name'],
                    "key_name": dataproc_conf['key_name'],
@@ -242,7 +232,7 @@ if __name__ == "__main__":
                         "url": dataproc_master_access_url}
                    ]
                    }
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_create.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_create.py
index f63e94c..978d2d4 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_create.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_create.py
@@ -27,6 +27,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -39,12 +40,12 @@ args = parser.parse_args()
 
 def upload_jars_parser(args):
     if not datalab.actions_lib.GCPActions().put_to_bucket(args.bucket, '/root/scripts/dataengine-service_jars_parser.py', 'jars_parser.py'):
-        print('Failed to upload jars_parser script')
+        logging.info('Failed to upload jars_parser script')
         raise Exception
 
 
 def build_dataproc_cluster(args, cluster_name):
-    print("Will be created cluster: {}".format(json.dumps(params, sort_keys=True, indent=4, separators=(',', ': '))))
+    logging.info("Will be created cluster: {}".format(json.dumps(params, sort_keys=True, indent=4, separators=(',', ': '))))
     return datalab.actions_lib.GCPActions().create_dataproc_cluster(cluster_name, args.region, params)
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py
index 32475e1..22fd566 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import multiprocessing
 import os
 import sys
@@ -48,16 +48,8 @@ def install_libs(instance, data_engine):
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE-SERVICE]')
-        print('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE-SERVICE]')
         data_engine = dict()
         try:
             data_engine['os_user'] = os.environ['conf_os_user']
@@ -70,7 +62,7 @@ if __name__ == "__main__":
             data_engine['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
             data_engine['libs'] = os.environ['libs']
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         try:
@@ -88,6 +80,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to install additional libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py
index 55af8b5..4b4af7c 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -29,20 +29,13 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[GETTING AVAILABLE PACKAGES]')
-        print('[GETTING AVAILABLE PACKAGES]')
         data_engine = dict()
         try:
             data_engine['os_user'] = os.environ['conf_os_user']
@@ -66,6 +59,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to get available libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
index 3229525..5c8972d 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import time
@@ -35,16 +35,10 @@ from Crypto.PublicKey import RSA
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         dataproc_conf = dict()
         if 'exploratory_name' in os.environ:
             dataproc_conf['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -118,7 +112,7 @@ if __name__ == "__main__":
     edge_status = GCPMeta.get_instance_status(dataproc_conf['edge_instance_hostname'])
     if edge_status != 'RUNNING':
         logging.info('ERROR: Edge node is unavailable! Aborting...')
-        print('ERROR: Edge node is unavailable! Aborting...')
+        logging.info('ERROR: Edge node is unavailable! Aborting...')
         ssn_hostname = GCPMeta.get_private_ip_address(dataproc_conf['service_base_name'] + '-ssn')
         datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
                                         os.environ['conf_os_user'],
@@ -126,7 +120,7 @@ if __name__ == "__main__":
         datalab.fab.append_result("Edge node is unavailable")
         sys.exit(1)
 
-    print("Will create exploratory environment with edge node as access point as following: ".format(
+    logging.info("Will create exploratory environment with edge node as access point as following: ".format(
         json.dumps(dataproc_conf, sort_keys=True, indent=4, separators=(',', ': '))))
     logging.info(json.dumps(dataproc_conf))
 
@@ -181,7 +175,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[Creating Dataproc Cluster]')
-        print('[Creating Dataproc Cluster]')
         params = "--region {0} --bucket {1} --params '{2}'".format(dataproc_conf['region'],
                                                                    dataproc_conf['bucket_name'],
                                                                    json.dumps(dataproc_cluster))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_terminate.py
index f66a67d..c30cad2 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_terminate.py
@@ -25,43 +25,36 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_dataproc_cluster(notebook_name, dataproc_name, bucket_name, ssh_user, key_path):
-    print('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
+    logging.info('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
     try:
         cluster = GCPMeta.get_list_cluster_statuses([dataproc_name])
         if cluster[0]['status'] == 'running':
             computational_name = GCPMeta.get_cluster(dataproc_name).get('labels').get('computational_name')
             GCPActions.bucket_cleanup(bucket_name, dataproc_conf['project_name'], dataproc_name)
-            print('The bucket {} has been cleaned successfully'.format(bucket_name))
+            logging.info('The bucket {} has been cleaned successfully'.format(bucket_name))
             GCPActions.delete_dataproc_cluster(dataproc_name, os.environ['gcp_region'])
-            print('The Dataproc cluster {} has been terminated successfully'.format(dataproc_name))
+            logging.info('The Dataproc cluster {} has been terminated successfully'.format(dataproc_name))
             GCPActions.remove_kernels(notebook_name, dataproc_name, cluster[0]['version'], ssh_user,
                                                     key_path, computational_name)
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate Dataproc cluster.", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     dataproc_conf = dict()
     dataproc_conf['service_base_name'] = os.environ['conf_service_base_name']
     dataproc_conf['edge_user_name'] = (os.environ['edge_user_name'])
@@ -79,7 +72,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE DATAPROC CLUSTER]')
-        print('[TERMINATE DATAPROC CLUSTER]')
         try:
             terminate_dataproc_cluster(dataproc_conf['notebook_name'], dataproc_conf['dataproc_name'],
                                        dataproc_conf['bucket_name'], os.environ['conf_os_user'],
@@ -97,7 +89,7 @@ if __name__ == "__main__":
                    "notebook_name": dataproc_conf['notebook_name'],
                    "user_own_bucket_name": dataproc_conf['bucket_name'],
                    "Action": "Terminate Dataproc cluster"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
index 5a33caa..76bfb13 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import multiprocessing
 import os
 import sys
@@ -39,7 +39,6 @@ def configure_slave(slave_number, data_engine):
     slave_hostname = GCPMeta.get_private_ip_address(slave_name)
     try:
         logging.info('[CREATING DATALAB SSH USER ON SLAVE NODE]')
-        print('[CREATING DATALAB SSH USER ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (slave_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem", initial_user,
              data_engine['datalab_ssh_user'], sudo_group)
@@ -55,7 +54,6 @@ def configure_slave(slave_number, data_engine):
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY ON SLAVE NODE]')
         logging.info('[INSTALLING USERs KEY ON SLAVE NODE]')
         additional_config = {"user_keyname": data_engine['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -74,7 +72,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CONFIGURE PROXY ON SLAVE NODE]')
-        print('[CONFIGURE PROXY ON ON SLAVE NODE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(slave_hostname, slave_name, keyfile_name, json.dumps(additional_config),
@@ -91,7 +88,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON SLAVE NODE]')
-        print('[INSTALLING PREREQUISITES ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
@@ -107,7 +103,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
-        print('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --master_ip {} --node_type {}". \
             format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
@@ -125,7 +120,7 @@ def configure_slave(slave_number, data_engine):
 
     if 'slave_gpu_type' in os.environ:
         try:
-            print('[INSTALLING GPU DRIVERS ON MASTER NODE]')
+            logging.info('[INSTALLING GPU DRIVERS ON SLAVE NODE]')
             params = "--hostname {} --keyfile {} --os_user {}".format(
                 slave_hostname, keyfile_name, data_engine['datalab_ssh_user'])
             try:
@@ -148,17 +143,10 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
-
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         data_engine = dict()
         data_engine['service_base_name'] = (os.environ['conf_service_base_name'])
         data_engine['edge_user_name'] = (os.environ['edge_user_name'])
@@ -228,7 +216,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER ON MASTER NODE]')
-        print('[CREATING DATALAB SSH USER ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (master_node_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem", initial_user,
              data_engine['datalab_ssh_user'], sudo_group)
@@ -244,7 +231,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY ON MASTER NODE]')
         logging.info('[INSTALLING USERs KEY ON MASTER NODE]')
         additional_config = {"user_keyname": data_engine['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -263,7 +249,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE PROXY ON MASTER NODE]')
-        print('[CONFIGURE PROXY ON ON MASTER NODE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(master_node_hostname, data_engine['master_node_name'], keyfile_name, json.dumps(additional_config),
@@ -280,7 +265,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON MASTER NODE]')
-        print('[INSTALLING PREREQUISITES ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
@@ -296,7 +280,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE MASTER NODE]')
-        print('[CONFIGURE MASTER NODE]')
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --master_ip {} --node_type {}".\
             format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
@@ -315,7 +298,7 @@ if __name__ == "__main__":
 
     if 'master_gpu_type' in os.environ:
         try:
-            print('[INSTALLING GPU DRIVERS ON MASTER NODE]')
+            logging.info('[INSTALLING GPU DRIVERS ON MASTER NODE]')
             params = "--hostname {} --keyfile {} --os_user {}".format(
                 master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'])
             try:
@@ -346,7 +329,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         notebook_instance_ip = GCPMeta.get_private_ip_address(data_engine['notebook_name'])
         additional_info = {
@@ -386,13 +368,12 @@ if __name__ == "__main__":
         spark_master_access_url = "https://" + edge_instance_hostname + "/{}/".format(
             data_engine['exploratory_name'] + '_' + data_engine['computational_name'])
         logging.info('[SUMMARY]')
-        print('[SUMMARY]')
-        print("Service base name: {}".format(data_engine['service_base_name']))
-        print("Region: {}".format(data_engine['region']))
-        print("Cluster name: {}".format(data_engine['cluster_name']))
-        print("Master node shape: {}".format(data_engine['master_size']))
-        print("Slave node shape: {}".format(data_engine['slave_size']))
-        print("Instance count: {}".format(str(data_engine['instance_count'])))
+        logging.info("Service base name: {}".format(data_engine['service_base_name']))
+        logging.info("Region: {}".format(data_engine['region']))
+        logging.info("Cluster name: {}".format(data_engine['cluster_name']))
+        logging.info("Master node shape: {}".format(data_engine['master_size']))
+        logging.info("Slave node shape: {}".format(data_engine['slave_size']))
+        logging.info("Instance count: {}".format(str(data_engine['instance_count'])))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": data_engine['cluster_name'],
                    "instance_id": data_engine['master_node_name'],
@@ -405,7 +386,7 @@ if __name__ == "__main__":
                        # "url": spark_master_url}
                    ]
                    }
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
index 96ba448..2db42e6 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,16 +34,10 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         data_engine = dict()
         data_engine['service_base_name'] = (os.environ['conf_service_base_name'])
         data_engine['edge_user_name'] = (os.environ['edge_user_name'])
@@ -64,7 +58,6 @@ if __name__ == "__main__":
                                                                             data_engine['endpoint_name']))
         if edge_status != 'RUNNING':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             ssn_hostname = GCPMeta.get_private_ip_address(data_engine['service_base_name'] + '-ssn')
             datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
                                             os.environ['conf_os_user'],
@@ -131,12 +124,12 @@ if __name__ == "__main__":
                 data_engine['service_base_name'], data_engine['endpoint_tag'], os.environ['application'])
         data_engine['notebook_primary_image_name'] = (lambda x: os.environ['notebook_primary_image_name'] if x != 'None'
         else data_engine['expected_primary_image_name'])(str(os.environ.get('notebook_primary_image_name')))
-        print('Searching pre-configured images')
+        logging.info('Searching pre-configured images')
         data_engine['primary_image_name'] = GCPMeta.get_image_by_name(data_engine['notebook_primary_image_name'])
         if data_engine['primary_image_name'] == '':
             data_engine['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
         else:
-            print('Pre-configured primary image found. Using: {}'.format(data_engine['primary_image_name'].get('name')))
+            logging.info('Pre-configured primary image found. Using: {}'.format(data_engine['primary_image_name'].get('name')))
             data_engine['primary_image_name'] = 'global/images/{}'.format(
                 data_engine['primary_image_name'].get('name'))
 
@@ -144,7 +137,7 @@ if __name__ == "__main__":
         if data_engine['secondary_image_name'] == '':
             data_engine['secondary_image_name'] = 'None'
         else:
-            print('Pre-configured secondary image found. Using: {}'.format(
+            logging.info('Pre-configured secondary image found. Using: {}'.format(
                 data_engine['secondary_image_name'].get('name')))
             data_engine['secondary_image_name'] = 'global/images/{}'.format(
                 data_engine['secondary_image_name'].get('name'))
@@ -189,7 +182,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE MASTER NODE]')
-        print('[CREATE MASTER NODE]')
         params = "--instance_name {0} --region {1} --zone {2} --vpc_name {3} --subnet_name {4} --instance_size {5} " \
                  "--ssh_key_path {6} --initial_user {7} --service_account_name {8} --image_name {9} " \
                  "--secondary_image_name {10} --instance_class {11} --primary_disk_size {12} " \
@@ -215,7 +207,6 @@ if __name__ == "__main__":
     try:
         for i in range(data_engine['instance_count'] - 1):
             logging.info('[CREATE SLAVE NODE {}]'.format(i + 1))
-            print('[CREATE SLAVE NODE {}]'.format(i + 1))
             slave_name = data_engine['slave_node_name'] + '{}'.format(i + 1)
             params = "--instance_name {0} --region {1} --zone {2} --vpc_name {3} --subnet_name {4} " \
                      "--instance_size {5} --ssh_key_path {6} --initial_user {7} --service_account_name {8} " \
@@ -242,7 +233,7 @@ if __name__ == "__main__":
             try:
                 GCPActions.remove_instance(slave_name, data_engine['zone'])
             except:
-                print("The slave instance {} hasn't been created.".format(slave_name))
+                logging.error("The slave instance {} hasn't been created.".format(slave_name))
         GCPActions.remove_instance(data_engine['master_node_name'], data_engine['zone'])
         datalab.fab.append_result("Failed to create slave instances.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
index 7843592..865d846 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,7 +34,7 @@ from fabric import *
 
 
 def start_data_engine(zone, cluster_name):
-    print("Starting data engine cluster")
+    logging.info("Starting data engine cluster")
     try:
         instances = GCPMeta.get_list_instances(zone, cluster_name)
         if 'items' in instances:
@@ -46,16 +46,10 @@ def start_data_engine(zone, cluster_name):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -76,7 +70,6 @@ if __name__ == "__main__":
                                                           data_engine['computational_name'])
     try:
         logging.info('[STARTING DATA ENGINE]')
-        print('[STARTING DATA ENGINE]')
         try:
             start_data_engine(data_engine['zone'], data_engine['cluster_name'])
         except Exception as err:
@@ -88,7 +81,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATE LAST ACTIVITY TIME]')
-        print('[UPDATE LAST ACTIVITY TIME]')
         data_engine['computational_id'] = data_engine['cluster_name'] + '-m'
         data_engine['tag_name'] = data_engine['service_base_name'] + '-tag'
         data_engine['notebook_ip'] = GCPMeta.get_private_ip_address(os.environ['notebook_instance_name'])
@@ -110,7 +102,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Start Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_stop.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_stop.py
index 1a06c2d..20ee0ba 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_stop.py
@@ -25,14 +25,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def stop_data_engine(zone, cluster_name):
-    print("Stopping data engine cluster")
+    logging.info("Stopping data engine cluster")
     try:
         instances = GCPMeta.get_list_instances(zone, cluster_name)
         if 'items' in instances:
@@ -44,16 +44,10 @@ def stop_data_engine(zone, cluster_name):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -74,7 +68,6 @@ if __name__ == "__main__":
                                                           data_engine['computational_name'])
     try:
         logging.info('[STOPPING DATA ENGINE]')
-        print('[STOPPING DATA ENGINE]')
         try:
             stop_data_engine(data_engine['zone'], data_engine['cluster_name'])
         except Exception as err:
@@ -87,7 +80,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Stop Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_terminate.py
index 5751014..24516c5 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_terminate.py
@@ -25,14 +25,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_data_engine(zone, notebook_name, os_user, key_path, cluster_name):
-    print("Terminating data engine cluster")
+    logging.info("Terminating data engine cluster")
     try:
         instances = GCPMeta.get_list_instances(zone, cluster_name)
         if 'items' in instances:
@@ -42,7 +42,7 @@ def terminate_data_engine(zone, notebook_name, os_user, key_path, cluster_name):
         datalab.fab.append_result("Failed to terminate dataengine", str(err))
         sys.exit(1)
 
-    print("Removing Data Engine kernels from notebook")
+    logging.info("Removing Data Engine kernels from notebook")
     try:
         datalab.actions_lib.remove_dataengine_kernels(notebook_name, os_user, key_path, cluster_name)
     except Exception as err:
@@ -51,16 +51,10 @@ def terminate_data_engine(zone, notebook_name, os_user, key_path, cluster_name):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -85,7 +79,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE DATA ENGINE]')
-        print('[TERMINATE DATA ENGINE]')
         try:
             terminate_data_engine(data_engine['zone'], data_engine['notebook_name'], os.environ['conf_os_user'],
                                   data_engine['key_path'], data_engine['cluster_name'])
@@ -100,7 +93,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Terminate Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
index 5f8fea4..be615de 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,12 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -105,7 +99,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -123,7 +116,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON DEEPLEARNING INSTANCE]')
-        print('[CONFIGURE PROXY ON DEEPLEARNING INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'])
@@ -157,7 +148,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
-        print('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} " \
                  "--os_user {} --jupyter_version {} " \
                  "--scala_version {} --spark_version {} " \
@@ -179,7 +169,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -197,7 +186,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -214,20 +202,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -236,7 +224,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -277,18 +264,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         #tensorboard_access_url = "https://" + edge_instance_hostname + "/{}-tensor/".format(
         #    notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['project_name']))
-        #print("TensorBoard URL: {}".format(tensorboard_url))
-        #print("TensorBoard log dir: /var/log/tensorboard")
-        print("JupyterLab URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['project_name']))
+        #logging.info("TensorBoard URL: {}".format(tensorboard_url))
+        #logging.info("TensorBoard log dir: /var/log/tensorboard")
+        logging.info("JupyterLab URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py
index ac7fbb0..c085ade 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,13 +34,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     def clear_resources():
         GCPActions.remove_instance(edge_conf['instance_name'], edge_conf['zone'])
         GCPActions.remove_static_address(edge_conf['static_address_name'], edge_conf['region'])
@@ -62,7 +55,7 @@ if __name__ == "__main__":
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         edge_conf = dict()
         edge_conf['service_base_name'] = (os.environ['conf_service_base_name'])
         edge_conf['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
@@ -155,7 +148,6 @@ if __name__ == "__main__":
             edge_conf['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             edge_conf['instance_hostname'], "/root/keys/" + os.environ['conf_key_name'] + ".pem",
             edge_conf['initial_user'], edge_conf['datalab_ssh_user'], edge_conf['sudo_group'])
@@ -171,7 +163,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING PREREQUISITES]')
         logging.info('[INSTALLING PREREQUISITES]')
         params = "--hostname {} --keyfile {} --user {} --region {}".format(
             edge_conf['instance_hostname'], edge_conf['ssh_key_path'], edge_conf['datalab_ssh_user'],
@@ -187,7 +178,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING HTTP PROXY]')
         logging.info('[INSTALLING HTTP PROXY]')
         additional_config = {"exploratory_subnet": edge_conf['private_subnet_cidr'],
                              "template_file": "/root/templates/squid.conf",
@@ -213,7 +203,6 @@ if __name__ == "__main__":
 
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": edge_conf['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -231,7 +220,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING NGINX REVERSE PROXY]')
         logging.info('[INSTALLING NGINX REVERSE PROXY]')
         edge_conf['keycloak_client_secret'] = str(uuid.uuid4())
         params = "--hostname {} --keyfile {} --user {} --keycloak_client_id {} --keycloak_client_secret {} " \
@@ -271,9 +259,9 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGRING EDGE AS NAT]')
+        logging.info('[CONFIGURING EDGE AS NAT]')
         if os.environ['edge_is_nat'] == 'true':
-            print('Installing nftables')
+            logging.info('Installing nftables')
             additional_config = {"exploratory_subnet": edge_conf['private_subnet_cidr'],
                                  "edge_ip": edge_conf['private_ip']}
             params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
@@ -298,17 +286,16 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(edge_conf['instance_name']))
-        print("Hostname: {}".format(edge_conf['instance_hostname']))
-        print("Public IP: {}".format(edge_conf['static_ip']))
-        print("Private IP: {}".format(edge_conf['private_ip']))
-        print("Key name: {}".format(edge_conf['key_name']))
-        print("Bucket name: {}".format(edge_conf['bucket_name']))
-        print("Shared bucket name: {}".format(edge_conf['shared_bucket_name']))
-        print("Notebook subnet: {}".format(edge_conf['private_subnet_cidr']))
-        print("Available GPU types: {}".format(edge_conf['gpu_types']))
+        logging.info("Instance name: {}".format(edge_conf['instance_name']))
+        logging.info("Hostname: {}".format(edge_conf['instance_hostname']))
+        logging.info("Public IP: {}".format(edge_conf['static_ip']))
+        logging.info("Private IP: {}".format(edge_conf['private_ip']))
+        logging.info("Key name: {}".format(edge_conf['key_name']))
+        logging.info("Bucket name: {}".format(edge_conf['bucket_name']))
+        logging.info("Shared bucket name: {}".format(edge_conf['shared_bucket_name']))
+        logging.info("Notebook subnet: {}".format(edge_conf['private_subnet_cidr']))
+        logging.info("Available GPU types: {}".format(edge_conf['gpu_types']))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": edge_conf['instance_hostname'],
                    "public_ip": edge_conf['static_ip'],
@@ -325,7 +312,7 @@ if __name__ == "__main__":
                    "gpu_types": edge_conf['gpu_types'],
                    "@class": "com.epam.datalab.dto.gcp.edge.EdgeInfoGcp",
                    "Action": "Create new EDGE server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_create_static_ip.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_create_static_ip.py
index 0411f7e..3564fb9 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_create_static_ip.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_create_static_ip.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--address_name', type=str, default='')
@@ -34,18 +35,12 @@ parser.add_argument('--region', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         if GCPMeta().get_static_address(args.region, args.address_name):
-            print("REQUESTED STATIC ADDRESS {} ALREADY EXISTS".format(args.address_name))
+            logging.info("REQUESTED STATIC ADDRESS {} ALREADY EXISTS".format(args.address_name))
         else:
-            print("Creating Elastic IP")
+            logging.info("Creating Elastic IP")
             GCPActions().create_static_address(args.address_name, args.region)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_start.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_start.py
index eddcfa2..2d35732 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_start.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_start.py
@@ -25,22 +25,15 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     edge_conf = dict()
     edge_conf['service_base_name'] = (os.environ['conf_service_base_name'])
     edge_conf['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
@@ -54,7 +47,6 @@ if __name__ == "__main__":
                                                                edge_conf['endpoint_name'])
 
     logging.info('[START EDGE]')
-    print('[START EDGE]')
     try:
         GCPActions.start_instance(edge_conf['instance_name'], edge_conf['zone'])
     except Exception as err:
@@ -66,19 +58,18 @@ if __name__ == "__main__":
         public_ip_address = \
             GCPMeta.get_static_address(edge_conf['region'], edge_conf['static_address_name'])['address']
         ip_address = GCPMeta.get_private_ip_address(edge_conf['instance_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(edge_conf['instance_name']))
-        print("Hostname: {}".format(instance_hostname))
-        print("Public IP: {}".format(public_ip_address))
-        print("Private IP: {}".format(ip_address))
+        logging.info("Instance name: {}".format(edge_conf['instance_name']))
+        logging.info("Hostname: {}".format(instance_hostname))
+        logging.info("Public IP: {}".format(public_ip_address))
+        logging.info("Private IP: {}".format(ip_address))
         with open("/root/result.json", 'w') as result:
             res = {"instance_name": edge_conf['instance_name'],
                    "hostname": instance_hostname,
                    "public_ip": public_ip_address,
                    "ip": ip_address,
                    "Action": "Start up notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py
index d7b2dbb..8ad3c8f 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py
@@ -28,20 +28,13 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Getting statuses of DataLab resources')
+    logging.info('Getting statuses of DataLab resources')
 
     try:
-        logging.info('[COLLECT DATA]')
-        print('[COLLECTING DATA]')
+        logging.info('[COLLECTING DATA]')
         params = '--list_resources "{}"'.format(os.environ['edge_list_resources'])
         try:
             subprocess.run("~/scripts/{}.py {}".format('common_collect_data', params), shell=True, check=True)
@@ -49,6 +42,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to collect information about DataLab resources.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_stop.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_stop.py
index e8ac3f3..5490dae 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_stop.py
@@ -25,19 +25,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
     edge_conf = dict()
@@ -49,7 +42,6 @@ if __name__ == "__main__":
                                                            edge_conf['project_name'], edge_conf['endpoint_name'])
 
     logging.info('[STOP EDGE]')
-    print('[STOP EDGE]')
     try:
         GCPActions.stop_instance(edge_conf['instance_name'], edge_conf['zone'])
     except Exception as err:
@@ -60,7 +52,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"instance_name": edge_conf['instance_name'],
                    "Action": "Stop edge server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_terminate.py
index d92b697..b0cf36f 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_terminate.py
@@ -25,14 +25,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_edge_node(user_name, service_base_name, region, zone, project_name, endpoint_name):
-    print("Terminating Dataengine-service clusters")
+    logging.info("Terminating Dataengine-service clusters")
     try:
         labels = [
             {'sbn': service_base_name},
@@ -42,14 +42,14 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         if clusters_list:
             for cluster_name in clusters_list:
                 GCPActions.delete_dataproc_cluster(cluster_name, region)
-                print('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
+                logging.info('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataproc", str(err))
         sys.exit(1)
 
-    print("Terminating EDGE and notebook instances")
+    logging.info("Terminating EDGE and notebook instances")
     base = '{}-{}-{}'.format(service_base_name, project_name, endpoint_name)
     keys = ['edge', 'ps', 'static-ip', 'bucket', 'subnet']
     targets = ['{}-{}'.format(base, k) for k in keys]
@@ -63,7 +63,7 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         datalab.fab.append_result("Failed to terminate instances", str(err))
         sys.exit(1)
 
-    print("Removing static addresses")
+    logging.info("Removing static addresses")
     try:
         static_addresses = GCPMeta.get_list_static_addresses(region, base)
         if 'items' in static_addresses:
@@ -74,7 +74,7 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         datalab.fab.append_result("Failed to remove static IPs", str(err))
         sys.exit(1)
 
-    print("Removing storage bucket")
+    logging.info("Removing storage bucket")
     try:
         buckets = GCPMeta.get_list_buckets(base)
         if 'items' in buckets:
@@ -85,7 +85,7 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         datalab.fab.append_result("Failed to remove buckets", str(err))
         sys.exit(1)
 
-    print("Removing firewalls")
+    logging.info("Removing firewalls")
     try:
         firewalls = GCPMeta.get_list_firewalls(base)
         if 'items' in firewalls:
@@ -96,7 +96,7 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         datalab.fab.append_result("Failed to remove security groups", str(err))
         sys.exit(1)
 
-    print("Removing Service accounts and roles")
+    logging.info("Removing Service accounts and roles")
     try:
         list_service_accounts = GCPMeta.get_list_service_accounts()
         for service_account in (set(targets) & set(list_service_accounts)):
@@ -110,7 +110,7 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         datalab.fab.append_result("Failed to remove service accounts and roles", str(err))
         sys.exit(1)
 
-    print("Removing subnets")
+    logging.info("Removing subnets")
     try:
         list_subnets = GCPMeta.get_list_subnetworks(region, '', base)
         if 'items' in list_subnets:
@@ -126,17 +126,10 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     edge_conf = dict()
     edge_conf['service_base_name'] = (os.environ['conf_service_base_name'])
     edge_conf['edge_user_name'] = (os.environ['edge_user_name'])
@@ -147,7 +140,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE EDGE]')
-        print('[TERMINATE EDGE]')
         try:
             terminate_edge_node(edge_conf['edge_user_name'], edge_conf['service_base_name'],
                                 edge_conf['region'], edge_conf['zone'], edge_conf['project_name'],
@@ -164,7 +156,7 @@ if __name__ == "__main__":
             res = {"service_base_name": edge_conf['service_base_name'],
                    "user_name": edge_conf['edge_user_name'],
                    "Action": "Terminate edge node"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
index e27f16b..9a85703 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,12 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -105,7 +99,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -123,7 +116,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
-        print('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -158,7 +149,6 @@ if __name__ == "__main__":
     # installing and configuring jupiter and all dependencies
     try:
         logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
-        print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} " \
                  "--region {} --spark_version {} " \
                  "--hadoop_version {} --os_user {} " \
@@ -181,7 +171,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -199,7 +188,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -216,20 +204,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -239,7 +227,7 @@ if __name__ == "__main__":
 
     if os.environ['gpu_enabled'] == 'True':
         try:
-            print('[INSTALLING GPU DRIVERS]')
+            logging.info('[INSTALLING GPU DRIVERS]')
             params = "--hostname {} --keyfile {} --os_user {}".format(
                 instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'])
             try:
@@ -254,7 +242,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -291,18 +278,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print("ReverseProxyNotebook".format(jupyter_notebook_access_url))
-        print("ReverseProxyUngit".format(jupyter_ungit_access_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info("ReverseProxyNotebook".format(jupyter_notebook_access_url))
+        logging.info("ReverseProxyUngit".format(jupyter_ungit_access_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
index d1f1db1..100999a 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,12 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -105,7 +99,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -123,7 +116,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON JUPYTERLAB INSTANCE]')
-        print('[CONFIGURE PROXY ON JUPYTERLAB INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO JUPYTERLAB NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO JUPYTERLAB NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -158,7 +149,6 @@ if __name__ == "__main__":
     # installing and configuring jupiter and all dependencies
     try:
         logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
-        print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --edge_ip {} " \
                  "--region {} --spark_version {} " \
                  "--hadoop_version {} --os_user {} " \
@@ -179,7 +169,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -197,7 +186,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -214,20 +202,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -236,7 +224,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -265,7 +252,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGURING PROXY FOR DOCKER]')
         logging.info('[CONFIGURING PROXY FOR DOCKER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -285,7 +271,6 @@ if __name__ == "__main__":
 
 
     try:
-        print('[STARTING JUPYTER CONTAINER]')
         logging.info('[STARTING JUPYTER CONTAINER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -312,18 +297,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_acces_url = "http://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("JupyterLab URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print("ReverseProxyNotebook".format(jupyter_notebook_acces_url))
-        print("ReverseProxyUngit".format(jupyter_ungit_acces_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("JupyterLab URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info("ReverseProxyNotebook".format(jupyter_notebook_acces_url))
+        logging.info("ReverseProxyUngit".format(jupyter_ungit_acces_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
index 91b5d32..d5a06c9 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,16 +34,10 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/project/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         project_conf = dict()
         project_conf['edge_unique_index'] = str(uuid.uuid4())[:5]
         project_conf['ps_unique_index'] = str(uuid.uuid4())[:5]
@@ -134,12 +128,12 @@ if __name__ == "__main__":
                 subprocess.run('echo "{0}" >> {1}{2}.pub'.format(project_conf['user_key'], os.environ['conf_key_dir'],
                                                         project_conf['project_name']), shell=True, check=True)
             except:
-                print("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
+                logging.info("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
         except KeyError:
-            print("ADMINSs PUBLIC KEY DOES NOT UPLOADED")
+            logging.info("ADMINSs PUBLIC KEY DOES NOT UPLOADED")
             sys.exit(1)
 
-        print("Will create exploratory environment with edge node as access point as following: ".format(json.dumps(
+        logging.info("Will create exploratory environment with edge node as access point as following: ".format(json.dumps(
             project_conf, sort_keys=True, indent=4, separators=(',', ': '))))
         logging.info(json.dumps(project_conf))
     except Exception as err:
@@ -148,7 +142,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SUBNET]')
-        print('[CREATE SUBNET]')
         params = "--subnet_name {} --region {} --vpc_selflink {} --prefix {} --vpc_cidr {} --user_subnets_range '{}'" \
                  .format(project_conf['private_subnet_name'], project_conf['region'], project_conf['vpc_selflink'],
                          project_conf['private_subnet_prefix'], project_conf['vpc_cidr'],
@@ -164,15 +157,14 @@ if __name__ == "__main__":
         try:
             GCPActions.remove_subnet(project_conf['private_subnet_name'], project_conf['region'])
         except:
-            print("Subnet hasn't been created.")
+            logging.info("Subnet hasn't been created.")
         datalab.fab.append_result("Failed to create subnet.", str(err))
         sys.exit(1)
 
-    print('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
+    logging.info('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
 
     try:
         logging.info('[CREATE SERVICE ACCOUNT AND ROLE FOR EDGE NODE]')
-        print('[CREATE SERVICE ACCOUNT AND ROLE FOR EDGE NODE]')
         params = "--service_account_name {} --role_name {} --unique_index {} --service_base_name {}".format(
             project_conf['edge_service_account_name'], project_conf['edge_role_name'],
             project_conf['edge_unique_index'], project_conf['service_base_name'])
@@ -188,14 +180,13 @@ if __name__ == "__main__":
                                               project_conf['service_base_name'])
             GCPActions.remove_role(project_conf['edge_role_name'])
         except:
-            print("Service account or role hasn't been created")
+            logging.info("Service account or role hasn't been created")
         GCPActions.remove_subnet(project_conf['private_subnet_name'], project_conf['region'])
         datalab.fab.append_result("Failed to creating service account and role.", str(err))
         sys.exit(1)
 
     try:
         logging.info('[CREATE SERVICE ACCOUNT AND ROLE FOR PRIVATE SUBNET]')
-        print('[CREATE SERVICE ACCOUNT AND ROLE FOR NOTEBOOK NODE]')
         params = "--service_account_name {} --role_name {} --policy_path {} --roles_path {} --unique_index {} " \
                  "--service_base_name {}".format(
                   project_conf['ps_service_account_name'], project_conf['ps_role_name'], project_conf['ps_policy_path'],
@@ -212,7 +203,7 @@ if __name__ == "__main__":
                                               project_conf['service_base_name'])
             GCPActions.remove_role(project_conf['ps_role_name'])
         except:
-            print("Service account or role hasn't been created")
+            logging.info("Service account or role hasn't been created")
         GCPActions.remove_service_account(project_conf['edge_service_account_name'],
                                           project_conf['service_base_name'])
         GCPActions.remove_role(project_conf['edge_role_name'])
@@ -222,7 +213,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE FIREWALL FOR EDGE NODE]')
-        print('[CREATE FIREWALL FOR EDGE NODE]')
         firewall_rules = dict()
         firewall_rules['ingress'] = []
         firewall_rules['egress'] = []
@@ -322,7 +312,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE FIREWALL FOR PRIVATE SUBNET]')
-        print('[CREATE FIREWALL FOR PRIVATE SUBNET]')
         firewall_rules = dict()
         firewall_rules['ingress'] = []
         firewall_rules['egress'] = []
@@ -404,7 +393,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE BUCKETS]')
-        print('[CREATE BUCKETS]')
         project_conf['shared_bucket_tags'] = {
             project_conf['tag_name']: project_conf['shared_bucket_name'],
             "endpoint_tag": project_conf['endpoint_tag'],
@@ -453,7 +441,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SET PERMISSIONS FOR USER AND SHARED BUCKETS]')
-        print('[SET PERMISSIONS FOR USER AND SHARED BUCKETS]')
         GCPActions.set_bucket_owner(project_conf['bucket_name'], project_conf['ps_service_account_name'],
                                     project_conf['service_base_name'])
         GCPActions.set_bucket_owner(project_conf['shared_bucket_name'], project_conf['ps_service_account_name'],
@@ -478,7 +465,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING STATIC IP ADDRESS]')
-        print('[CREATING STATIC IP ADDRESS]')
         params = "--address_name {} --region {}".format(project_conf['static_address_name'], project_conf['region'])
         try:
             subprocess.run("~/scripts/{}.py {}".format('edge_create_static_ip', params), shell=True, check=True)
@@ -490,7 +476,7 @@ if __name__ == "__main__":
         try:
             GCPActions.remove_static_address(project_conf['static_address_name'], project_conf['region'])
         except:
-            print("Static IP address hasn't been created.")
+            logging.info("Static IP address hasn't been created.")
         GCPActions.remove_bucket(project_conf['bucket_name'])
         GCPActions.remove_firewall(project_conf['fw_edge_ingress_public'])
         GCPActions.remove_firewall(project_conf['fw_edge_ingress_internal'])
@@ -518,7 +504,6 @@ if __name__ == "__main__":
         project_conf['static_ip'] = \
             GCPMeta.get_static_address(project_conf['region'], project_conf['static_address_name'])['address']
         logging.info('[CREATE EDGE INSTANCE]')
-        print('[CREATE EDGE INSTANCE]')
         params = "--instance_name {} --region {} --zone {} --vpc_name {} --subnet_name {} --instance_size {} " \
                  "--ssh_key_path {} --initial_user {} --service_account_name {} --image_name {} --instance_class {} " \
                  "--static_ip {} --network_tag {} --labels '{}' --service_base_name {}".format(
@@ -554,7 +539,6 @@ if __name__ == "__main__":
     if os.environ['edge_is_nat'] == 'true':
         try:
             logging.info('[CREATE NAT ROUTE]')
-            print('[REATE NAT ROUTE]')
             nat_route_name = '{0}-{1}-{2}-nat-route'.format(project_conf['service_base_name'],
                                                                   project_conf['project_name'],
                                                                   project_conf['endpoint_name'])
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/project_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/project_terminate.py
index 7e0dbfc..144ec05 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/project_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/project_terminate.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import requests
 import sys
@@ -33,7 +33,7 @@ import traceback
 
 
 def terminate_edge_node(endpoint_name, project_name, service_base_name, region, zone):
-    print("Terminating Dataengine-service clusters")
+    logging.info("Terminating Dataengine-service clusters")
     try:
         labels = [
             {'sbn': service_base_name},
@@ -43,14 +43,14 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         if clusters_list:
             for cluster_name in clusters_list:
                 GCPActions.delete_dataproc_cluster(cluster_name, region)
-                print('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
+                logging.info('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataengine-service", str(err))
         sys.exit(1)
 
-    print("Terminating EDGE and notebook instances")
+    logging.info("Terminating EDGE and notebook instances")
     base = '{}-{}-{}'.format(service_base_name, project_name, endpoint_name)
     keys = ['edge', 'ps', 'static-ip', 'bucket', 'subnet']
     targets = ['{}-{}'.format(base, k) for k in keys]
@@ -64,7 +64,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to terminate instances", str(err))
         sys.exit(1)
 
-    print("Removing static addresses")
+    logging.info("Removing static addresses")
     try:
         static_addresses = GCPMeta.get_list_static_addresses(region, base)
         if 'items' in static_addresses:
@@ -75,7 +75,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to remove static addresses", str(err))
         sys.exit(1)
 
-    print("Removing storage bucket")
+    logging.info("Removing storage bucket")
     try:
         buckets = GCPMeta.get_list_buckets(base)
         if 'items' in buckets:
@@ -86,7 +86,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to remove storage buckets", str(err))
         sys.exit(1)
 
-    print("Removing project specific images")
+    logging.info("Removing project specific images")
     try:
         project_image_name_beginning = '{}-{}-{}'.format(service_base_name, project_name, endpoint_name)
         images = GCPMeta.get_list_images(project_image_name_beginning)
@@ -94,10 +94,10 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
             for i in images['items']:
                 GCPActions.remove_image(i['name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing firewalls")
+    logging.info("Removing firewalls")
     try:
         firewalls = GCPMeta.get_list_firewalls(base)
         if 'items' in firewalls:
@@ -108,7 +108,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to remove security groups", str(err))
         sys.exit(1)
 
-    print("Removing Service accounts and roles")
+    logging.info("Removing Service accounts and roles")
     try:
         list_service_accounts = GCPMeta.get_list_service_accounts()
         sa_keys = ['edge-sa', 'ps-sa']
@@ -125,7 +125,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to remove service accounts and roles", str(err))
         sys.exit(1)
 
-    print("Removing subnets")
+    logging.info("Removing subnets")
     try:
         list_subnets = GCPMeta.get_list_subnetworks(region, '', base)
         if 'items' in list_subnets:
@@ -139,7 +139,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to remove subnets", str(err))
         sys.exit(1)
 
-    print("Removing nat route")
+    logging.info("Removing nat route")
     try:
         nat_route_name = '{0}-{1}-{2}-nat-route'.format(service_base_name, project_name, endpoint_name)
         route = GCPMeta.get_route(nat_route_name)
@@ -151,17 +151,10 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/project/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     project_conf = dict()
     project_conf['service_base_name'] = (os.environ['conf_service_base_name'])
     project_conf['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
@@ -172,7 +165,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE EDGE]')
-        print('[TERMINATE EDGE]')
         try:
             terminate_edge_node(project_conf['endpoint_name'], project_conf['project_name'],
                                 project_conf['service_base_name'],
@@ -181,11 +173,10 @@ if __name__ == "__main__":
             traceback.print_exc()
             datalab.fab.append_result("Failed to terminate edge.", str(err))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
     try:
-        print('[KEYCLOAK PROJECT CLIENT DELETE]')
         logging.info('[KEYCLOAK PROJECT CLIENT DELETE]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             os.environ['keycloak_auth_server_url'])
@@ -220,14 +211,14 @@ if __name__ == "__main__":
                                           headers={"Authorization": "Bearer " + keycloak_token.get("access_token"),
                                                    "Content-Type": "application/json"})
     except Exception as err:
-        print("Failed to remove project client from Keycloak", str(err))
+        logging.error("Failed to remove project client from Keycloak", str(err))
 
     try:
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": project_conf['service_base_name'],
                    "project_name": project_conf['project_name'],
                    "Action": "Terminate project"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
index 3991d50..dae62df 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -108,7 +102,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -126,7 +119,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON RSTUDIO INSTANCE]')
-        print('[CONFIGURE PROXY ON RSTUDIO INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -145,7 +137,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO RSTUDIO NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -162,7 +153,6 @@ if __name__ == "__main__":
     # installing and configuring RStudio and all dependencies
     try:
         logging.info('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
-        print('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {0}  --keyfile {1} " \
                  "--region {2} --rstudio_pass {3} " \
                  "--rstudio_version {4} --os_user {5} " \
@@ -183,7 +173,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -201,7 +190,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -218,20 +206,20 @@ if __name__ == "__main__":
         
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -240,7 +228,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -264,7 +251,7 @@ if __name__ == "__main__":
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         datalab.fab.append_result("Failed to set edge reverse proxy template.", str(err))
         GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
         sys.exit(1)
@@ -278,18 +265,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         rstudio_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("Rstudio URL: {}".format(rstudio_ip_url))
-        print("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
-        print("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("Rstudio URL: {}".format(rstudio_ip_url))
+        logging.info("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
+        logging.info("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py
index 11e5283..d326408 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py
@@ -29,6 +29,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -62,7 +63,7 @@ def configure_rstudio():
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
             subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine-service_ensured', shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
@@ -76,7 +77,7 @@ def configure_rstudio():
             subprocess.run('echo \'HADOOP_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
         except Exception as err:
-            print('Error:', str(err))
+            logging.error('Error: {}'.format(str(err)))
             sys.exit(1)
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
index 0afb38b..31e0876 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -39,12 +39,6 @@ parser.add_argument('--ssn_unique_index', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     def clear_resources():
         GCPActions.remove_instance(ssn_conf['instance_name'], ssn_conf['zone'])
         GCPActions.remove_static_address(ssn_conf['static_address_name'], ssn_conf['region'])
@@ -63,7 +57,6 @@ if __name__ == "__main__":
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
         logging.info('[DERIVING NAMES]')
-        print('[DERIVING NAMES]')
         ssn_conf = dict()
         ssn_conf['instance'] = 'ssn'
         ssn_conf['pre_defined_vpc'] = False
@@ -149,7 +142,6 @@ if __name__ == "__main__":
             ssn_conf['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             ssn_conf['instance_hostname'], ssn_conf['ssh_key_path'], ssn_conf['initial_user'],
             ssn_conf['datalab_ssh_user'], ssn_conf['sudo_group'])
@@ -166,7 +158,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING PREREQUISITES TO SSN INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO SSN INSTANCE]')
         params = "--hostname {} --keyfile {} --pip_packages " \
                  "'boto3 bcrypt==3.1.7 backoff argparse fabric awscli pymongo pyyaml " \
                  "google-api-python-client google-cloud-storage pycryptodome' --user {} --region {}". \
@@ -185,7 +176,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE SSN INSTANCE]')
-        print('[CONFIGURE SSN INSTANCE]')
         additional_config = {"nginx_template_dir": "/root/templates/",
                              "service_base_name": ssn_conf['service_base_name'],
                              "security_group_id": ssn_conf['firewall_name'], "vpc_id": ssn_conf['vpc_name'],
@@ -208,7 +198,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURING DOCKER AT SSN INSTANCE]')
-        print('[CONFIGURING DOCKER AT SSN INSTANCE]')
         additional_config = [{"name": "base", "tag": "latest"},
                              {"name": "project", "tag": "latest"},
                              {"name": "edge", "tag": "latest"},
@@ -240,7 +229,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE SSN INSTANCE UI]')
-        print('[CONFIGURE SSN INSTANCE UI]')
 
         cloud_params = [
             {
@@ -532,7 +520,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     logging.info('[CREATE KEYCLOAK CLIENT]')
-    print('[CREATE KEYCLOAK CLIENT]')
     keycloak_params = "--service_base_name {} --keycloak_auth_server_url {} --keycloak_realm_name {} " \
                       "--keycloak_user {} --keycloak_user_password {} --instance_public_ip {} --keycloak_client_secret {} " \
         .format(ssn_conf['service_base_name'], os.environ['keycloak_auth_server_url'],
@@ -548,29 +535,28 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SUMMARY]')
-        print('[SUMMARY]')
-        print("Service base name: {}".format(ssn_conf['service_base_name']))
-        print("SSN Name: {}".format(ssn_conf['instance_name']))
-        print("SSN Hostname: {}".format(ssn_conf['instance_hostname']))
-        print("Role name: {}".format(ssn_conf['role_name']))
-        print("Key name: {}".format(os.environ['conf_key_name']))
-        print("VPC Name: {}".format(ssn_conf['vpc_name']))
-        print("Subnet Name: {}".format(ssn_conf['subnet_name']))
-        print("Firewall Names: {}".format(ssn_conf['firewall_name']))
-        print("SSN instance size: {}".format(ssn_conf['instance_size']))
-        print("SSN AMI name: {}".format(ssn_conf['image_name']))
-        print("Region: {}".format(ssn_conf['region']))
+        logging.info("Service base name: {}".format(ssn_conf['service_base_name']))
+        logging.info("SSN Name: {}".format(ssn_conf['instance_name']))
+        logging.info("SSN Hostname: {}".format(ssn_conf['instance_hostname']))
+        logging.info("Role name: {}".format(ssn_conf['role_name']))
+        logging.info("Key name: {}".format(os.environ['conf_key_name']))
+        logging.info("VPC Name: {}".format(ssn_conf['vpc_name']))
+        logging.info("Subnet Name: {}".format(ssn_conf['subnet_name']))
+        logging.info("Firewall Names: {}".format(ssn_conf['firewall_name']))
+        logging.info("SSN instance size: {}".format(ssn_conf['instance_size']))
+        logging.info("SSN AMI name: {}".format(ssn_conf['image_name']))
+        logging.info("Region: {}".format(ssn_conf['region']))
         jenkins_url = "http://{}/jenkins".format(ssn_conf['instance_hostname'])
         jenkins_url_https = "https://{}/jenkins".format(ssn_conf['instance_hostname'])
-        print("Jenkins URL: {}".format(jenkins_url))
-        print("Jenkins URL HTTPS: {}".format(jenkins_url_https))
-        print("DataLab UI HTTP URL: http://{}".format(ssn_conf['instance_hostname']))
-        print("DataLab UI HTTPS URL: https://{}".format(ssn_conf['instance_hostname']))
+        logging.info("Jenkins URL: {}".format(jenkins_url))
+        logging.info("Jenkins URL HTTPS: {}".format(jenkins_url_https))
+        logging.info("DataLab UI HTTP URL: http://{}".format(ssn_conf['instance_hostname']))
+        logging.info("DataLab UI HTTPS URL: https://{}".format(ssn_conf['instance_hostname']))
         try:
             with open('jenkins_creds.txt') as f:
-                print(f.read())
+                logging.info(f.read())
         except:
-            print("Jenkins is either configured already or have issues in configuration routine.")
+            logging.info("Jenkins is either configured already or have issues in configuration routine.")
 
         with open("/root/result.json", 'w') as f:
             res = {"service_base_name": ssn_conf['service_base_name'],
@@ -586,7 +572,7 @@ if __name__ == "__main__":
                    "action": "Create SSN instance"}
             f.write(json.dumps(res))
 
-        print('Upload response file')
+        logging.info('Upload response file')
         params = "--instance_name {} --local_log_filepath {} --os_user {} --instance_hostname {}". \
             format(ssn_conf['instance_name'], local_log_filepath, ssn_conf['datalab_ssh_user'],
                    ssn_conf['instance_hostname'])
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_static_ip.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_static_ip.py
index 58bf787..3564fb9 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_static_ip.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_static_ip.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--address_name', type=str, default='')
@@ -34,17 +35,12 @@ parser.add_argument('--region', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         if GCPMeta().get_static_address(args.region, args.address_name):
-            print("REQUESTED STATIC ADDRESS {} ALREADY EXISTS".format(args.address_name))
+            logging.info("REQUESTED STATIC ADDRESS {} ALREADY EXISTS".format(args.address_name))
         else:
-            print("Creating Elastic IP")
+            logging.info("Creating Elastic IP")
             GCPActions().create_static_address(args.address_name, args.region)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_vpc.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_vpc.py
index d6bd97d..a453b36 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_vpc.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_vpc.py
@@ -24,6 +24,7 @@
 import argparse
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--vpc_name', type=str, default='')
@@ -32,10 +33,10 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.vpc_name != '':
         if GCPMeta().get_vpc(args.vpc_name):
-            print("REQUESTED VPC {} ALREADY EXISTS".format(args.vpc_name))
+            logging.info("REQUESTED VPC {} ALREADY EXISTS".format(args.vpc_name))
         else:
-            print("Creating VPC {}".format(args.vpc_name))
+            logging.info("Creating VPC {}".format(args.vpc_name))
             GCPActions().create_vpc(args.vpc_name)
     else:
-        print("VPC name can't be empty.")
+        logging.error("VPC name can't be empty.")
         sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_finalize.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_finalize.py
index 93761f1..a29d398 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_finalize.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_finalize.py
@@ -25,6 +25,7 @@ import argparse
 import boto3
 import sys
 from datalab.ssn_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--key_id', type=str, default='')
@@ -37,7 +38,7 @@ def cleanup(key_id):
         current_user = iam.CurrentUser()
         for user_key in current_user.access_keys.all():
             if user_key.id == key_id:
-                print("Deleted key {}".format(user_key.id))
+                logging.info("Deleted key {}".format(user_key.id))
                 user_key.delete()
         return True
     except:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py
index f8a269c..59f0658 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -38,11 +38,6 @@ parser.add_argument('--ssn_unique_index', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -52,7 +47,6 @@ if __name__ == "__main__":
         ssn_conf['pre_defined_subnet'] = False
         ssn_conf['pre_defined_firewall'] = False
         logging.info('[DERIVING NAMES]')
-        print('[DERIVING NAMES]')
         ssn_conf['ssn_unique_index'] = args.ssn_unique_index
         ssn_conf['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'].replace('_', '-').lower()[:20], '-', True)
@@ -95,7 +89,6 @@ if __name__ == "__main__":
     except KeyError:
         try:
             logging.info('[CREATE VPC]')
-            print('[CREATE VPC]')
             params = "--vpc_name {}".format(ssn_conf['vpc_name'])
             try:
                 subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True, check=True)
@@ -109,7 +102,7 @@ if __name__ == "__main__":
                 try:
                     GCPActions.remove_vpc(ssn_conf['vpc_name'])
                 except:
-                    print("VPC hasn't been created.")
+                    logging.error("VPC hasn't been created.")
             sys.exit(1)
 
     try:
@@ -122,7 +115,6 @@ if __name__ == "__main__":
     except KeyError:
         try:
             logging.info('[CREATE SUBNET]')
-            print('[CREATE SUBNET]')
             params = "--subnet_name {} --region {} --vpc_selflink {} --prefix {} --vpc_cidr {} --ssn {}".\
                 format(ssn_conf['subnet_name'], ssn_conf['region'], ssn_conf['vpc_selflink'], ssn_conf['subnet_prefix'],
                        ssn_conf['vpc_cidr'], True)
@@ -138,7 +130,7 @@ if __name__ == "__main__":
                 try:
                     GCPActions.remove_subnet(ssn_conf['subnet_name'], ssn_conf['region'])
                 except:
-                    print("Subnet hasn't been created.")
+                    logging.error("Subnet hasn't been created.")
             if not ssn_conf['pre_defined_vpc']:
                 GCPActions.remove_vpc(ssn_conf['vpc_name'])
             sys.exit(1)
@@ -153,7 +145,6 @@ if __name__ == "__main__":
     except KeyError:
         try:
             logging.info('[CREATE FIREWALL]')
-            print('[CREATE FIREWALL]')
             if os.environ['conf_allowed_ip_cidr'] != '0.0.0.0/0':
                 ssn_conf['allowed_ip_cidr'] = ssn_conf['allowed_ip_cidr'].split(', ')
             else:
@@ -208,7 +199,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SERVICE ACCOUNT AND ROLE]')
-        print('[CREATE SERVICE ACCOUNT AND ROLE]')
         params = "--service_account_name {} --role_name {} --policy_path {} --roles_path {} --unique_index {} " \
                  "--service_base_name {}".format( ssn_conf['service_account_name'], ssn_conf['role_name'],
                                                   ssn_conf['ssn_policy_path'], ssn_conf['ssn_roles_path'],
@@ -224,7 +214,7 @@ if __name__ == "__main__":
             GCPActions.remove_service_account(ssn_conf['service_account_name'], ssn_conf['service_base_name'])
             GCPActions.remove_role(ssn_conf['role_name'])
         except:
-            print("Service account hasn't been created")
+            logging.error("Service account hasn't been created")
         if not ssn_conf['pre_defined_firewall']:
             GCPActions.remove_firewall('{}-ingress'.format(ssn_conf['firewall_name']))
             GCPActions.remove_firewall('{}-egress'.format(ssn_conf['firewall_name']))
@@ -236,7 +226,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING STATIC IP ADDRESS]')
-        print('[CREATING STATIC IP ADDRESS]')
         params = "--address_name {} --region {}".format(ssn_conf['static_address_name'], ssn_conf['region'])
         try:
             subprocess.run("~/scripts/{}.py {}".format('ssn_create_static_ip', params), shell=True, check=True)
@@ -248,7 +237,7 @@ if __name__ == "__main__":
         try:
             GCPActions.remove_static_address(ssn_conf['static_address_name'], ssn_conf['region'])
         except:
-            print("Static IP address hasn't been created.")
+            logging.error("Static IP address hasn't been created.")
         GCPActions.remove_service_account(ssn_conf['service_account_name'], ssn_conf['service_base_name'])
         GCPActions.remove_role(ssn_conf['role_name'])
         GCPActions.remove_bucket(ssn_conf['ssn_bucket_name'])
@@ -273,7 +262,6 @@ if __name__ == "__main__":
         ssn_conf['static_ip'] = GCPMeta.get_static_address(ssn_conf['region'],
                                                            ssn_conf['static_address_name'])['address']
         logging.info('[CREATE SSN INSTANCE]')
-        print('[CREATE SSN INSTANCE]')
         params = "--instance_name {0} --region {1} --zone {2} --vpc_name {3} --subnet_name {4} --instance_size {5}"\
                  " --ssh_key_path {6} --initial_user {7} --service_account_name {8} --image_name {9}"\
                  " --instance_class {10} --static_ip {11} --network_tag {12} --labels '{13}' " \
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py
index e703df4..6f6c9cd 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py
@@ -23,7 +23,7 @@
 
 import datalab.ssn_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -31,13 +31,8 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     ssn_conf = dict()
     ssn_conf['service_base_name'] = datalab.fab.replace_multi_symbols(
         os.environ['conf_service_base_name'].replace('_', '-').lower()[:20], '-', True)
@@ -55,7 +50,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE SSN]')
-        print('[TERMINATE SSN]')
         params = "--service_base_name {} --region {} --zone {} --pre_defined_vpc {} --vpc_name {}".format(
             ssn_conf['service_base_name'], ssn_conf['region'], ssn_conf['zone'], pre_defined_vpc, ssn_conf['vpc_name'])
         try:
@@ -68,7 +62,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[KEYCLOAK SSN CLIENT DELETE]')
         logging.info('[KEYCLOAK SSN CLIENT DELETE]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(os.environ['keycloak_auth_server_url'])
         keycloak_client_url = '{0}/admin/realms/{1}/clients'.format(os.environ['keycloak_auth_server_url'],
@@ -87,13 +80,13 @@ if __name__ == "__main__":
         keycloak_client = requests.delete(keycloak_client_delete_url, headers={"Authorization": "Bearer {}"
                                           .format(keycloak_token.get("access_token")), "Content-Type": "application/json"})
     except Exception as err:
-        print("Failed to remove ssn client from Keycloak", str(err))
+        logging.error("Failed to remove ssn client from Keycloak: {}".format(str(err)))
 
     try:
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": ssn_conf['service_base_name'],
                    "Action": "Terminate ssn with all service_base_name environment"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate_gcp_resources.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate_gcp_resources.py
index 5464ec3..c0938f6 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate_gcp_resources.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate_gcp_resources.py
@@ -26,6 +26,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
 from datalab.ssn_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--zone', type=str)
@@ -43,7 +44,7 @@ args = parser.parse_args()
 if __name__ == "__main__":
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print("Terminating Dataengine-service clusters")
+    logging.info("Terminating Dataengine-service clusters")
     try:
         labels = [
             {'sbn': args.service_base_name}
@@ -52,54 +53,54 @@ if __name__ == "__main__":
         if clusters_list:
             for cluster_name in clusters_list:
                 GCPActions.delete_dataproc_cluster(cluster_name, args.region)
-                print('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
+                logging.info('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Terminating instances")
+    logging.info("Terminating instances")
     try:
         instances = GCPMeta.get_list_instances(args.zone, args.service_base_name)
         if 'items' in instances:
             for i in instances['items']:
                 GCPActions.remove_instance(i['name'], args.zone)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing images")
+    logging.info("Removing images")
     try:
         images = GCPMeta.get_list_images(args.service_base_name)
         if 'items' in images:
             for i in images['items']:
                 GCPActions.remove_image(i['name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing static addresses")
+    logging.info("Removing static addresses")
     try:
         static_addresses = GCPMeta.get_list_static_addresses(args.region, args.service_base_name)
         if 'items' in static_addresses:
             for i in static_addresses['items']:
                 GCPActions.remove_static_address(i['name'], args.region)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing firewalls")
+    logging.info("Removing firewalls")
     try:
         firewalls = GCPMeta.get_list_firewalls(args.service_base_name)
         if 'items' in firewalls:
             for i in firewalls['items']:
                 GCPActions.remove_firewall(i['name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing Service accounts and roles")
+    logging.info("Removing Service accounts and roles")
     try:
         list_service_accounts = GCPMeta.get_list_service_accounts()
         for service_account in list_service_accounts:
@@ -110,10 +111,10 @@ if __name__ == "__main__":
             if role.startswith(args.service_base_name):
                 GCPActions.remove_role(role)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing subnets")
+    logging.info("Removing subnets")
     try:
         list_subnets = GCPMeta.get_list_subnetworks(args.region, '', args.service_base_name)
         if 'items' in list_subnets:
@@ -122,26 +123,26 @@ if __name__ == "__main__":
             for i in subnets['items']:
                 GCPActions.remove_subnet(i['name'], args.region)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing s3 buckets")
+    logging.info("Removing s3 buckets")
     try:
         buckets = GCPMeta.get_list_buckets(args.service_base_name)
         if 'items' in buckets:
             for i in buckets['items']:
                 GCPActions.remove_bucket(i['name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing SSN VPC")
+    logging.info("Removing SSN VPC")
     if args.pre_defined_vpc != 'True':
         try:
             GCPActions.remove_vpc(args.vpc_name)
         except Exception as err:
-            print('Error: {0}'.format(err))
-            print("No such VPC")
+            logging.error('Error: {0}'.format(err))
+            logging.error("No such VPC")
             sys.exit(1)
     else:
-        print('VPC is predefined, VPC will not be deleted')
+        logging.info('VPC is predefined, VPC will not be deleted')
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py
index 0f57a46..709a534 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import requests
 import sys
@@ -35,12 +35,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -107,7 +101,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -125,7 +118,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON SUPERSET INSTANCE]')
-        print('[CONFIGURE PROXY ON SUPERSET INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_ip, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGURE KEYCLOAK]')
         logging.info('[CONFIGURE KEYCLOAK]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             os.environ['keycloak_auth_server_url'])
@@ -167,7 +158,7 @@ if __name__ == "__main__":
             json_keycloak_client_id = json.loads(keycloak_get_id_client.text)
             # Check, if response is not empty
             if len(json_keycloak_client_id) != 0:
-                print('Keycloak client {} exists. Getting his required attributes.'.format(keycloak_client_id))
+                logging.info('Keycloak client {} exists. Getting his required attributes.'.format(keycloak_client_id))
                 keycloak_id_client = json_keycloak_client_id[0]['id']
                 keycloak_client_get_secret_url = ("{0}/{1}/client-secret".format(keycloak_client_create_url,
                                                                                  keycloak_id_client))
@@ -178,7 +169,7 @@ if __name__ == "__main__":
                 json_keycloak_client_secret = json.loads(keycloak_client_get_secret.text)
                 keycloak_client_secret = json_keycloak_client_secret['value']
             else:
-                print('Keycloak client does not exists. Creating new client {0}.'.format(keycloak_client_id))
+                logging.info('Keycloak client does not exists. Creating new client {0}.'.format(keycloak_client_id))
                 keycloak_client_secret = str(uuid.uuid4())
                 keycloak_client_data = {
                     "clientId": keycloak_client_id,
@@ -201,7 +192,6 @@ if __name__ == "__main__":
     # updating repositories & installing and configuring superset
     try:
         logging.info('[CONFIGURE SUPERSET NOTEBOOK INSTANCE]')
-        print('[CONFIGURE SUPERSET NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} " \
                  "--region {} --os_user {} " \
                  "--datalab_path {} --keycloak_auth_server_url {} " \
@@ -225,7 +215,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -243,7 +232,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -260,20 +248,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -282,7 +270,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -311,7 +298,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGURING PROXY FOR DOCKER]')
         logging.info('[CONFIGURING PROXY FOR DOCKER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -330,7 +316,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[STARTING SUPERSET]')
         logging.info('[STARTING SUPERSET]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -356,18 +341,17 @@ if __name__ == "__main__":
         superset_notebook_acces_url = "http://" + edge_instance_hostname + "/{}/".format(notebook_config['exploratory_name'])
         superset_ungit_acces_url = "http://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("SUPERSET URL: {}".format(superset_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print("ReverseProxyNotebook".format(superset_notebook_acces_url))
-        print("ReverseProxyUngit".format(superset_ungit_acces_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(notebook_config['key_name'],
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("SUPERSET URL: {}".format(superset_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info("ReverseProxyNotebook: {}".format(superset_notebook_acces_url))
+        logging.info("ReverseProxyUngit: {}".format(superset_ungit_acces_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(notebook_config['key_name'],
                                                                                            notebook_config[
                                                                                                'datalab_ssh_user'],
                                                                                            ip_address))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
index f201944..a1a990d 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -39,12 +39,6 @@ args = parser.parse_args()
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -112,7 +106,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -130,7 +123,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON TENSORFLOW-RSTUDIO INSTANCE]')
-        print('[CONFIGURE PROXY ON TENSORFLOW-RSTUDIO INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -148,7 +140,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -165,7 +156,6 @@ if __name__ == "__main__":
     # installing and configuring TensorFlow and RSTUDIO and all dependencies
     try:
         logging.info('[CONFIGURE TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
-        print('[CONFIGURE TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {}  --keyfile {} " \
                  "--region {} --rstudio_pass {} " \
                  "--rstudio_version {} --os_user {} " \
@@ -185,7 +175,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -203,7 +192,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -220,20 +208,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print(
+                    logging.info(
                         "Looks like another image creating operation for your template have been started a moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -242,7 +230,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -282,20 +269,19 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         rstudio_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("TensorBoard URL: {}".format(tensorboard_url))
-        print("TensorBoard log dir: /var/log/tensorboard")
-        print("Rstudio URL: {}".format(rstudio_ip_url))
-        print("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
-        print("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("TensorBoard URL: {}".format(tensorboard_url))
+        logging.info("TensorBoard log dir: /var/log/tensorboard")
+        logging.info("Rstudio URL: {}".format(rstudio_ip_url))
+        logging.info("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
+        logging.info("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py
index 9708b3b..dd67bfa 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -106,7 +100,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -123,7 +116,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON TENSOR INSTANCE]')
-        print('[CONFIGURE PROXY ON TENSOR INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO TENSOR NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO TENSOR NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -157,7 +148,7 @@ if __name__ == "__main__":
 
     #Installing GPU drivers
     try:
-        print('[INSTALLING GPU DRIVERS]')
+        logging.info('[INSTALLING GPU DRIVERS]')
         params = "--hostname {} --keyfile {} --os_user {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'])
         try:
@@ -174,7 +165,6 @@ if __name__ == "__main__":
     # installing and configuring TensorFlow and all dependencies
     try:
         logging.info('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
-        print('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --region {} --os_user {} --exploratory_name {} --edge_ip {}" \
                  .format(instance_hostname, notebook_config['ssh_key_path'],
                          os.environ['gcp_region'], notebook_config['datalab_ssh_user'],
@@ -190,7 +180,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -208,7 +197,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -225,20 +213,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -247,7 +235,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -287,18 +274,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("TensorBoard URL: {}".format(tensorboard_url))
-        print("TensorBoard log dir: /var/log/tensorboard")
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("TensorBoard URL: {}".format(tensorboard_url))
+        logging.info("TensorBoard log dir: /var/log/tensorboard")
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
index a9fe2b4..78a96a1 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -106,7 +100,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -123,7 +116,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON ZEPPELIN INSTANCE]')
-        print('[CONFIGURE PROXY ON ZEPPELIN INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO ZEPPELIN NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO ZEPPELIN NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -158,7 +149,6 @@ if __name__ == "__main__":
     # installing and configuring zeppelin and all dependencies
     try:
         logging.info('[CONFIGURE ZEPPELIN NOTEBOOK INSTANCE]')
-        print('[CONFIGURE ZEPPELIN NOTEBOOK INSTANCE]')
         additional_config = {"frontend_hostname": edge_instance_name,
                              "backend_hostname": instance_hostname,
                              "backend_port": "8080",
@@ -190,7 +180,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -208,7 +197,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -225,20 +213,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -247,7 +235,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -284,16 +271,15 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         zeppelin_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("Zeppelin URL: {}".format(zeppelin_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("Zeppelin URL: {}".format(zeppelin_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
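
A side note on mechanical print-to-logging swaps like the ones above: print() joins all of its positional arguments with spaces, while the logging methods treat any extra positional arguments as lazy %-style format arguments. A call such as logging.error("message", str(err)) therefore has no placeholder for the second argument and is reported as a logging formatting error instead of the intended line. The snippet below is only an illustrative sketch against the standard library (not the project's datalab.logger) showing the safe spellings:

    import logging

    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s', level=logging.INFO)
    err = RuntimeError("token request failed")   # stand-in exception for the demo

    # print() happily joins its arguments with a space:
    print("Failed to remove ssn client from Keycloak", str(err))

    # logging.error() with a bare extra argument has no %s placeholder in the
    # message, so the logging module emits "--- Logging error ---" to stderr
    # instead of the intended record:
    logging.error("Failed to remove ssn client from Keycloak", str(err))

    # Either of these carries the exception text through correctly:
    logging.error("Failed to remove ssn client from Keycloak: %s", err)
    logging.error("Failed to remove ssn client from Keycloak: {}".format(err))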



[incubator-datalab] 04/04: [DATALAB-2409]: replaced print with logging in all general/scripts/os .py scripts

Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 5f4d257c9808db914a027531c5b2f66e2c459bc4
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Oct 11 18:00:04 2021 +0300

    [DATALAB-2409]: replaced print with logging in all general/scripts/os .py scripts
---
 .../src/general/scripts/os/common_clean_instance.py    | 17 +++++++++--------
 .../src/general/scripts/os/common_configure_proxy.py   |  5 +++--
 .../scripts/os/common_configure_reverse_proxy.py       |  7 ++++---
 .../src/general/scripts/os/common_configure_spark.py   |  7 ++++---
 .../general/scripts/os/configure_proxy_for_docker.py   |  7 ++++---
 .../src/general/scripts/os/dataengine_install_libs.py  | 14 +++-----------
 .../src/general/scripts/os/dataengine_list_libs.py     | 14 +++-----------
 .../general/scripts/os/dataengine_reconfigure_spark.py | 18 +++++-------------
 .../src/general/scripts/os/get_list_available_pkgs.py  |  5 +++--
 .../src/general/scripts/os/install_additional_libs.py  | 17 +++++++++--------
 .../scripts/os/jupyter_install_dataengine_kernels.py   |  3 ++-
 .../general/scripts/os/jupyterlab_container_start.py   |  7 ++++---
 .../src/general/scripts/os/manage_git_creds.py         |  9 +++++----
 .../src/general/scripts/os/notebook_git_creds.py       | 12 ++----------
 .../general/scripts/os/notebook_inactivity_check.py    | 13 +++----------
 .../src/general/scripts/os/notebook_install_libs.py    | 14 +++-----------
 .../src/general/scripts/os/notebook_list_libs.py       | 14 +++-----------
 .../general/scripts/os/notebook_reconfigure_spark.py   | 18 +++++-------------
 .../scripts/os/rstudio_dataengine_create_configs.py    |  5 +++--
 .../src/general/scripts/os/superset_start.py           |  7 ++++---
 .../os/tensor-rstudio_dataengine_create_configs.py     |  5 +++--
 .../scripts/os/tensor_install_dataengine_kernels.py    |  3 ++-
 .../scripts/os/zeppelin_dataengine_create_configs.py   |  3 ++-
 23 files changed, 88 insertions(+), 136 deletions(-)
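
Every hunk below imports the shared helper with "from datalab.logger import logging", replacing the per-script logging.basicConfig() boilerplate each file used to carry. The helper itself is not part of this patch; the following is only a rough, assumed sketch (module path, handler choices and log-file naming are guesses reconstructed from the deleted boilerplate, not the repository's actual implementation):

    # datalab/logger.py -- hypothetical sketch, not the module shipped in the repo
    import logging
    import os

    # Reuse the log location the removed per-script boilerplate pointed at:
    # /logs/<conf_resource>/<conf_resource>_<request_id>.log
    _resource = os.environ.get('conf_resource', 'datalab')
    _log_dir = os.path.join('/logs', _resource)
    _log_file = os.path.join(_log_dir, '{}_{}.log'.format(_resource, os.environ.get('request_id', 'local')))
    os.makedirs(_log_dir, exist_ok=True)

    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                        level=logging.DEBUG,
                        handlers=[logging.FileHandler(_log_file), logging.StreamHandler()])

With a module along those lines in place, the scripts keep calling logging.info()/logging.error() unchanged after the one-line import swap.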

diff --git a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
index a9d370e..99e7904 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
@@ -25,6 +25,7 @@ import argparse
 import os
 import sys
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -48,7 +49,7 @@ def general_clean():
         remove_os_pkg(['nodejs', 'npm'])
         conn.sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -64,7 +65,7 @@ def clean_jupyter():
         conn.sudo('rm -f /etc/systemd/system/jupyter-notebook.service')
         conn.sudo('systemctl daemon-reload')
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -79,7 +80,7 @@ def clean_zeppelin():
         conn.sudo('rm -f /etc/systemd/system/zeppelin-notebook.service')
         conn.sudo('systemctl daemon-reload')
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -89,7 +90,7 @@ def clean_rstudio():
         conn.sudo('rm -f /home/{}/.Rprofile'.format(args.os_user))
         conn.sudo('rm -f /home/{}/.Renviron'.format(args.os_user))
     except Exception as err:
-        print('Error:', str(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -100,7 +101,7 @@ def clean_tensor():
         conn.sudo('systemctl disable tensorboard')
         conn.sudo('systemctl daemon-reload')
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -111,12 +112,12 @@ def clean_tensor_rstudio():
         conn.sudo('systemctl disable tensorboard')
         conn.sudo('systemctl daemon-reload')
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    print('Configure connections')
+    logging.info('Configure connections')
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
@@ -155,6 +156,6 @@ if __name__ == "__main__":
             elif args.application == ('tensor-rstudio'):
                 clean_tensor_rstudio()
     else:
-        print('Found default ami, do not make clean')
+        logging.info('Found default ami, do not make clean')
     #conn.close()
     sys.exit(0)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
index 604a23a..19666df 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
@@ -25,6 +25,7 @@ import argparse
 import json
 from datalab.notebook_lib import *
 from datalab.fab import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -40,12 +41,12 @@ args = parser.parse_args()
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
     deeper_config = json.loads(args.additional_config)
 
-    print("Enabling proxy for notebook server for repositories access.")
+    logging.info("Enabling proxy for notebook server for repositories access.")
     datalab.notebook_lib.enable_proxy(deeper_config['proxy_host'], deeper_config['proxy_port'])
 
     conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
index 38ea331..f43a3a9 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.fab import *
 from datalab.meta_lib import get_instance_private_ip_address
+from datalab.logger import logging
 from fabric import *
 from jinja2 import Environment, FileSystemLoader
 from datalab.fab import *
@@ -96,15 +97,15 @@ def make_template():
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Make template")
+    logging.info("Make template")
 
     try:
         conf_file_name = make_template()
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.edge_hostname, args.os_user, args.keyfile)
     conn.put('/tmp/{}.conf'.format(conf_file_name), '/tmp/{}.conf'.format(conf_file_name))
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
index 47f7b78..21477c8 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
@@ -28,6 +28,7 @@ import sys
 import time
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -47,7 +48,7 @@ def update_spark_defaults_conf(spark_conf):
             conn.sudo('''sed -i '/^# Updated/d' {0}'''.format(conf))
             conn.sudo('''echo "# Updated by DATALAB at {0} >> {1}'''.format(timestamp, conf))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -76,12 +77,12 @@ def add_custom_spark_properties(cluster_name):
                 conn.sudo('echo "{0}" >> /opt/{1}/spark/conf/spark-defaults.conf'.format(prop, cluster_name))
             conn.sudo('sed -i "/^\s*$/d" /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    print('Configure connections')
+    logging.info('Configure connections')
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py b/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
index a233750..72c1c9c 100644
--- a/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
+++ b/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
@@ -25,6 +25,7 @@ import argparse
 import sys
 from fabric import *
 from datalab.fab import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -36,10 +37,10 @@ http_file = '/etc/systemd/system/docker.service.d/http-proxy.conf'
 https_file = '/etc/systemd/system/docker.service.d/https-proxy.conf'
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
-    print("Configuring proxy for docker")
+    logging.info("Configuring proxy for docker")
     try:
         conn.sudo('mkdir -p /etc/systemd/system/docker.service.d')
         conn.sudo('touch {}'.format(http_file))
@@ -56,6 +57,6 @@ if __name__ == "__main__":
         conn.sudo('update-rc.d docker enable')
         conn.sudo('systemctl restart docker')
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
index 05f4885..6ce8615 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import multiprocessing
 import os
 import sys
@@ -44,22 +44,14 @@ def install_libs_on_slaves(slave, data_engine):
         # Run script to install additional libs
         subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True, check=True)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE]')
-        print('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE]')
         data_engine = dict()
         try:
             data_engine['os_user'] = os.environ['conf_os_user']
@@ -100,6 +92,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to install additional libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
index 2580279..b52df61 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,16 +33,8 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[GETTING AVAILABLE PACKAGES]')
-        print('[GETTING AVAILABLE PACKAGES]')
         data_engine = dict()
         try:
             data_engine['os_user'] = os.environ['conf_os_user']
@@ -55,7 +47,7 @@ if __name__ == "__main__":
                 data_engine['tag_name'], data_engine['master_node_name'])
             data_engine['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         params = "--os_user {} --instance_ip {} --keyfile '{}' --group {}" \
@@ -67,6 +59,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to get available libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
index 41e07b4..08a56a6 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import json
-import logging
+from datalab.logger import logging
 import multiprocessing
 import os
 import sys
@@ -43,22 +43,14 @@ def install_libs_on_slaves(slave, data_engine):
         # Run script to install additional libs
         subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True, check=True)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[RECONFIGURING SPARK ON DATAENGINE]')
-        print('[RECONFIGURING SPARK ON DATAENGINE]')
         data_engine = dict()
         try:
             data_engine['os_user'] = os.environ['conf_os_user']
@@ -111,7 +103,7 @@ if __name__ == "__main__":
             raise Exception
 
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to reconfigure Spark.", str(err))
         sys.exit(1)
 
@@ -119,8 +111,8 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Reconfigure Spark on Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except:
-        print("Failed writing results.")
+        logging.error("Failed writing results.")
         sys.exit(0)
diff --git a/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py b/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
index 8e33b20..9d25eb7 100644
--- a/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
@@ -28,6 +28,7 @@ import time
 import xmlrpc.client
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -53,7 +54,7 @@ def get_available_pip_pkgs(version):
                 time.sleep(5)
                 continue
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -70,7 +71,7 @@ def get_uncategorised_pip_pkgs(all_pkgs_pip2, all_pkgs_pip3):
             pip_pkgs[pkg] = "N/A"
         return pip_pkgs
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py b/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
index c3661e4..f18f0cb 100644
--- a/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
@@ -27,6 +27,7 @@ import json
 import sys
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -42,7 +43,7 @@ if __name__ == "__main__":
     global conn
     conn = datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
 
-    print('Installing libraries: {}'.format(args.libs))
+    logging.info('Installing libraries: {}'.format(args.libs))
     general_status = list()
     data = ast.literal_eval(args.libs)
     pkgs = {"libraries": {}}
@@ -59,40 +60,40 @@ if __name__ == "__main__":
                 pkgs['libraries'][data[row]['group']].append(
                     [data[row]['name'], data[row]['version']])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to parse libs list.", str(err))
         sys.exit(1)
 
     try:
-        print('Installing os packages: {}'.format(pkgs['libraries']['os_pkg']))
+        logging.info('Installing os packages: {}'.format(pkgs['libraries']['os_pkg']))
         status = install_os_pkg(pkgs['libraries']['os_pkg'])
         general_status = general_status + status
     except KeyError:
         pass
 
     try:
-        print('Installing java dependencies: {}'.format(pkgs['libraries']['java']))
+        logging.info('Installing java dependencies: {}'.format(pkgs['libraries']['java']))
         status = install_java_pkg(pkgs['libraries']['java'])
         general_status = general_status + status
     except KeyError:
         pass
 
     #try:
-        #print('Installing pip2 packages: {}'.format(pkgs['libraries']['pip2']))
+        #logging.info('Installing pip2 packages: {}'.format(pkgs['libraries']['pip2']))
         #status = install_pip_pkg(pkgs['libraries']['pip2'], 'pip2', 'pip2', args.dataengine_service)
         #general_status = general_status + status
     #except KeyError:
         #pass
 
     try:
-        print('Installing pip3 packages: {}'.format(pkgs['libraries']['pip3']))
+        logging.info('Installing pip3 packages: {}'.format(pkgs['libraries']['pip3']))
         status = install_pip_pkg(pkgs['libraries']['pip3'], 'pip3', 'pip3', args.dataengine_service)
         general_status = general_status + status
     except KeyError:
         pass
 
     try:
-        print('Installing other packages (only tries pip3): {}'.format(pkgs['libraries']['others']))
+        logging.info('Installing other packages (only tries pip3): {}'.format(pkgs['libraries']['others']))
         for pkg in pkgs['libraries']['others']:
             status_pip3 = install_pip_pkg([pkg], 'pip3', 'others', args.dataengine_service)
             general_status = general_status + status_pip3
@@ -103,7 +104,7 @@ if __name__ == "__main__":
         and os.environ['notebook_r_enabled'] == 'true')\
             or os.environ['application'] in ('rstudio', 'tensor-rstudio'):
         try:
-            print('Installing R packages: {}'.format(pkgs['libraries']['r_pkg']))
+            logging.info('Installing R packages: {}'.format(pkgs['libraries']['r_pkg']))
             status = install_r_pkg(pkgs['libraries']['r_pkg'])
             general_status = general_status + status
         except KeyError:
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
index 8fbc014..560ba1d 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
@@ -25,6 +25,7 @@ import argparse
 import os
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -107,7 +108,7 @@ def install_sparkamagic_kernels(args):
                 spark_master_ip, args.os_user))
         datalab.fab.conn.sudo('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user))
     except Exception as err:
-        print(err)
+        logging.error(err)
         sys.exit(1)
 
 def create_inactivity_log(master_ip):
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py b/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
index a7e50b1..c619e81 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
@@ -25,6 +25,7 @@ import sys
 import argparse
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -48,13 +49,13 @@ def start_jupyterlab_container(jupyterlab_dir):
     except: sys.exit(1)
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
-    print("Starting Jupyter container")
+    logging.info("Starting Jupyter container")
     try:
         start_jupyterlab_container(jupyterlab_dir)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
     conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py b/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
index e8be6fb..6955f07 100644
--- a/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
+++ b/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
@@ -28,6 +28,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -52,14 +53,14 @@ if __name__ == "__main__":
             conn.run('rm .gitcreds')
         git_creds = os.environ['git_creds']
     except KeyError as err:
-        print('Error: {0}'.format(err))
-        print("Parameter git_creds does not exist. Skipping.")
+        logging.error('Error: {0}'.format(err))
+        logging.error("Parameter git_creds does not exist. Skipping.")
         sys.exit(0)
 
     try:
         data = ast.literal_eval(git_creds)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to parse git credentials.", str(err))
         sys.exit(1)
 
@@ -83,7 +84,7 @@ if __name__ == "__main__":
         conn.put('new_gitcreds', '/home/{}/.gitcreds'.format(args.os_user))
 
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to add host/login/(password/token) to config.", str(err))
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py b/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
index bb2b974..fdfe87f 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,16 +33,8 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[SETUP USER GIT CREDENTIALS]')
-        print('[SETUP USER GIT CREDENTIALS]')
         notebook_config = dict()
         notebook_config['notebook_name'] = os.environ['notebook_instance_name']
         notebook_config['os_user'] = os.environ['conf_os_user']
@@ -61,7 +53,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to manage git credentials.", str(err))
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py b/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
index 6784b81..c70593b 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
@@ -28,18 +28,11 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/project/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[ASK INACTIVITY STATUS]')
-        print('[ASK INACTIVITY STATUS]')
         notebook_config = dict()
         try:
             notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -57,7 +50,7 @@ if __name__ == "__main__":
             else:
                 notebook_config['dataengine_ip'] = '0.0.0.0'
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         params = "--os_user {0} --instance_ip {1} --keyfile '{2}' --resource_type {3} --dataengine_ip {4}" \
@@ -69,6 +62,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to ask inactivity status.", str(err))
         sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py b/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
index 50b9609..b894449 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,16 +33,8 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[INSTALLING ADDITIONAL LIBRARIES ON NOTEBOOK INSTANCE]')
-        print('[INSTALLING ADDITIONAL LIBRARIES ON NOTEBOOK INSTANCE]')
         notebook_config = dict()
         try:
             notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -54,7 +46,7 @@ if __name__ == "__main__":
             notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
             notebook_config['libs'] = os.environ['libs']
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         params = '--os_user {} --instance_ip {} --keyfile "{}" --libs "{}"' \
@@ -67,6 +59,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to install additional libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py b/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
index e6e989e..4cf4215 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,16 +33,8 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[GETTING AVAILABLE PACKAGES]')
-        print('[GETTING AVAILABLE PACKAGES]')
         notebook_config = dict()
         try:
             notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -54,7 +46,7 @@ if __name__ == "__main__":
                 notebook_config['tag_name'], notebook_config['notebook_name'])
             notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         params = "--os_user {} --instance_ip {} --keyfile '{}' --group {}" \
@@ -66,6 +58,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to get available libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py b/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
index 596d4d8..fa98c77 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,16 +34,8 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[RECONFIGURING SPARK]')
-        print('[RECONFIGURING SPARK]')
         notebook_config = dict()
         try:
             notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -54,7 +46,7 @@ if __name__ == "__main__":
                 notebook_config['tag_name'], notebook_config['notebook_name'])
             notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         params = "--os_user {} --instance_ip {} --keyfile '{}' --resource_type notebook " \
@@ -66,7 +58,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to reconfigure Spark.", str(err))
         sys.exit(1)
 
@@ -75,8 +67,8 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": notebook_config['service_base_name'],
                    "Action": "Reconfigure Spark on Notebook"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except:
-        print("Failed writing results.")
+        logging.error("Failed writing results.")
         sys.exit(0)
diff --git a/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
index c7e0017..58f07af 100644
--- a/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
@@ -29,6 +29,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -64,7 +65,7 @@ def configure_rstudio():
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
             subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured', shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
@@ -78,7 +79,7 @@ def configure_rstudio():
                   args.os_user + '/.Rprofile', shell=True, check=True)
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/superset_start.py b/infrastructure-provisioning/src/general/scripts/os/superset_start.py
index 9f976ae..88fb46f 100644
--- a/infrastructure-provisioning/src/general/scripts/os/superset_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/superset_start.py
@@ -25,6 +25,7 @@ import argparse
 import sys
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -47,14 +48,14 @@ def start_superset(superset_dir):
     except: sys.exit(1)
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
-    print("Starting Superset")
+    logging.info("Starting Superset")
     try:
         start_superset(superset_dir)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
     conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
index c8965b6..3255bb0 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
@@ -28,6 +28,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -63,7 +64,7 @@ def configure_rstudio():
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
             subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured', shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
@@ -77,7 +78,7 @@ def configure_rstudio():
                   args.os_user + '/.Rprofile', shell=True, check=True)
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
index e6d27aa..c155e42 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
@@ -25,6 +25,7 @@ import argparse
 import os
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -100,7 +101,7 @@ def install_sparkamagic_kernels(args):
                 spark_master_ip, args.os_user))
         datalab.fab.conn.sudo('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user))
     except Exception as err:
-        print(err)
+        logging.error(err)
         sys.exit(1)
 
 def create_inactivity_log(master_ip, hoststring):
diff --git a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
index 7f424eb..52cf241 100644
--- a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
@@ -29,6 +29,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -146,7 +147,7 @@ def configure_zeppelin_dataengine_interpreter(cluster_name, cluster_dir, os_user
                         subprocess.run('sleep 5', shell=True, check=True)
         subprocess.run('touch /home/' + os_user + '/.ensure_dir/dataengine_' + cluster_name + '_interpreter_ensured', shell=True, check=True)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
 

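Note on the new import: every script in the hunks above now does "from datalab.logger import logging"
instead of building its own logging.basicConfig setup in the __main__ block. The shared logger module
itself is not part of this diff; the following is only a minimal sketch of what such a module could
look like, reusing the format string and the /logs/<conf_resource>/ path convention that the removed
per-script setup used. Module contents, fallbacks and names below are assumptions for illustration,
not the committed implementation:

    # datalab/logger.py -- hypothetical sketch only, not the module shipped in this commit
    import logging as _logging
    import os

    # Mirror the per-script setup removed in the hunks above: write to
    # /logs/<conf_resource>/<conf_resource>_<project_name>_<request_id>.log
    # when those environment variables are present, otherwise log to stderr.
    _handlers = [_logging.StreamHandler()]
    try:
        _log_file = "{}_{}_{}.log".format(os.environ['conf_resource'],
                                          os.environ['project_name'],
                                          os.environ['request_id'])
        _log_path = "/logs/{}/{}".format(os.environ['conf_resource'], _log_file)
        os.makedirs(os.path.dirname(_log_path), exist_ok=True)
        _handlers.append(_logging.FileHandler(_log_path))
    except KeyError:
        pass

    _logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                         level=_logging.DEBUG,
                         handlers=_handlers)

    # Re-export the configured stdlib module so callers can keep writing
    # "from datalab.logger import logging" and then logging.info(...) / logging.error(...).
    logging = _logging

With a module along these lines, the logging.info/logging.error calls introduced above share one
configuration instead of each script assembling its own log file handler.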
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org