Posted to commits@datalab.apache.org by lf...@apache.org on 2021/10/11 15:00:20 UTC

[incubator-datalab] 02/04: [DATALAB-2409]: replaced print with logging in all general/scripts/azure .py scripts

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 94919c31e428022564cbe6881dfb300c3050169d
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Oct 11 17:27:01 2021 +0300

    [DATALAB-2409]: replaced print with logging in all general/scripts/azure .py scripts
---
 .../general/scripts/azure/common_collect_data.py   |  3 +-
 .../azure/common_create_datalake_directory.py      |  9 ++--
 .../scripts/azure/common_create_instance.py        | 15 +++---
 .../scripts/azure/common_create_notebook_image.py  |  7 +--
 .../scripts/azure/common_create_security_group.py  |  9 ++--
 .../scripts/azure/common_create_storage_account.py | 13 ++---
 .../general/scripts/azure/common_create_subnet.py  |  9 ++--
 .../scripts/azure/common_download_git_certfile.py  |  5 +-
 .../azure/common_notebook_configure_dataengine.py  | 15 ++----
 .../scripts/azure/common_prepare_notebook.py       | 28 ++++------
 .../scripts/azure/common_remove_remote_kernels.py  |  5 +-
 .../general/scripts/azure/common_reupload_key.py   |  3 +-
 .../general/scripts/azure/common_start_notebook.py | 26 +++------
 .../general/scripts/azure/common_stop_notebook.py  | 22 +++-----
 .../scripts/azure/common_terminate_notebook.py     | 21 +++-----
 .../general/scripts/azure/dataengine_configure.py  | 41 ++++----------
 .../general/scripts/azure/dataengine_prepare.py    | 23 +++-----
 .../src/general/scripts/azure/dataengine_start.py  | 18 ++-----
 .../src/general/scripts/azure/dataengine_stop.py   | 17 ++----
 .../general/scripts/azure/dataengine_terminate.py  | 19 +++----
 .../scripts/azure/deeplearning_configure.py        | 46 ++++++----------
 .../src/general/scripts/azure/edge_configure.py    | 45 ++++++----------
 .../src/general/scripts/azure/edge_prepare.py      | 56 ++++++++-----------
 .../src/general/scripts/azure/edge_start.py        | 23 +++-----
 .../src/general/scripts/azure/edge_status.py       | 11 +---
 .../src/general/scripts/azure/edge_stop.py         | 14 ++---
 .../src/general/scripts/azure/edge_terminate.py    | 46 +++++++---------
 .../src/general/scripts/azure/jupyter_configure.py | 41 +++++---------
 .../general/scripts/azure/jupyterlab_configure.py  | 42 +++++----------
 .../src/general/scripts/azure/project_prepare.py   | 46 ++++++----------
 .../src/general/scripts/azure/project_terminate.py | 54 ++++++++-----------
 .../general/scripts/azure/rstudio_change_pass.py   |  7 +--
 .../src/general/scripts/azure/rstudio_configure.py | 46 ++++++----------
 .../src/general/scripts/azure/ssn_configure.py     | 59 ++++++++------------
 .../general/scripts/azure/ssn_create_datalake.py   |  9 ++--
 .../general/scripts/azure/ssn_create_peering.py    |  5 +-
 .../scripts/azure/ssn_create_resource_group.py     |  7 +--
 .../src/general/scripts/azure/ssn_create_vpc.py    |  7 +--
 .../src/general/scripts/azure/ssn_prepare.py       | 29 +++-------
 .../src/general/scripts/azure/ssn_terminate.py     | 63 ++++++++++------------
 .../src/general/scripts/azure/tensor_configure.py  | 47 ++++++----------
 .../general/scripts/azure/zeppelin_configure.py    | 41 +++++---------
 42 files changed, 379 insertions(+), 673 deletions(-)
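
Every script below swaps its per-script logging setup for a single shared module, datalab.logger, imported as "from datalab.logger import logging". That module's contents are not part of this commit; what follows is a minimal sketch of what it might look like, assuming it reproduces the removed per-script basicConfig blocks (same format string, and a log file derived from the conf_resource, project_name and request_id environment variables) in one place:

    # Hypothetical sketch of datalab/logger.py -- not shown in this diff.
    # Configure the stdlib logging module once, then let every script
    # re-import the already-configured module via
    # "from datalab.logger import logging".
    import logging
    import os

    # Assumed: same log-file naming scheme as the basicConfig blocks this
    # commit removes from the individual scripts.
    _log_filename = "{}_{}_{}.log".format(os.environ.get('conf_resource', 'unknown'),
                                          os.environ.get('project_name', 'unknown'),
                                          os.environ.get('request_id', 'unknown'))
    _log_filepath = os.path.join("/logs", os.environ.get('conf_resource', 'unknown'),
                                 _log_filename)

    # Make sure the log directory exists before basicConfig opens the file.
    os.makedirs(os.path.dirname(_log_filepath), exist_ok=True)

    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                        level=logging.DEBUG,
                        filename=_log_filepath)

With a module like that in place, each script's print(...) calls become logging.info(...) or logging.error(...), and the duplicated basicConfig boilerplate disappears, as the hunks below show.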

diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_collect_data.py b/infrastructure-provisioning/src/general/scripts/azure/common_collect_data.py
index 11a62db..ee8eda7 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_collect_data.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_collect_data.py
@@ -29,6 +29,7 @@ import traceback
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -44,7 +45,7 @@ if __name__ == "__main__":
             data_instances = AzureMeta().get_list_instance_statuses(args.resource_group_name, data.get('host'))
             statuses['host'] = data_instances
         except:
-            print("Hosts JSON wasn't been provided")
+            logging.error("Hosts JSON wasn't been provided")
         with open('/root/result.json', 'w') as outfile:
             json.dump(statuses, outfile)
     except Exception as err:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_datalake_directory.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_datalake_directory.py
index 29ff0c2..f93260d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_datalake_directory.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_datalake_directory.py
@@ -26,6 +26,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--resource_group_name', type=str, default='')
@@ -42,10 +43,10 @@ if __name__ == "__main__":
         for datalake in AzureMeta().list_datalakes(args.resource_group_name):
             if args.datalake_name == datalake.tags["Name"]:
                 if AzureMeta().verify_datalake_directory(datalake.name, args.directory_name):
-                    print("Data Lake Store Directory '{}' already exist".format(args.directory_name))
+                    logging.info("Data Lake Store Directory '{}' already exist".format(args.directory_name))
                 else:
                     AzureActions().create_datalake_directory(datalake.name, args.directory_name)
-                    print("Data Lake Store Directory '{}' has been created".format(args.directory_name))
+                    logging.info("Data Lake Store Directory '{}' has been created".format(args.directory_name))
                     if args.ad_user != '':
                        AzureActions().set_user_permissions_to_datalake_directory(
                            datalake.name, '/{}'.format(args.directory_name), args.ad_user)
@@ -57,8 +58,8 @@ if __name__ == "__main__":
                                                                 ad_group=args.ad_group)
                 datalake_exists = True
         if not datalake_exists:
-            print("Requested Data Lake Store '{}' is missing".format(datalake.name))
+            logging.info("Requested Data Lake Store '{}' is missing".format(datalake.name))
             sys.exit(1)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_instance.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_instance.py
index 5ad8253..3f284d3 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_instance.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--instance_name', type=str, default='')
@@ -56,25 +57,25 @@ if __name__ == "__main__":
     if args.instance_name != '':
         try:
             if AzureMeta().get_instance(args.resource_group_name, args.instance_name):
-                print("REQUESTED INSTANCE {} ALREADY EXISTS".format(args.instance_name))
+                logging.info("REQUESTED INSTANCE {} ALREADY EXISTS".format(args.instance_name))
             else:
                 if args.public_ip_name != 'None':
                     if AzureMeta().get_static_ip(args.resource_group_name, args.public_ip_name):
-                        print("REQUESTED PUBLIC IP ADDRESS {} ALREADY EXISTS.".format(args.public_ip_name))
+                        logging.info("REQUESTED PUBLIC IP ADDRESS {} ALREADY EXISTS.".format(args.public_ip_name))
                         static_public_ip_address = AzureMeta().get_static_ip(
                             args.resource_group_name, args.public_ip_name).ip_address
                     else:
-                        print("Creating Static IP address {}".format(args.public_ip_name))
+                        logging.info("Creating Static IP address {}".format(args.public_ip_name))
                         static_public_ip_address = \
                             AzureActions().create_static_public_ip(args.resource_group_name, args.public_ip_name,
                                                                    args.region, args.instance_name,
                                                                    json.loads(args.tags))
                 if AzureMeta().get_network_interface(args.resource_group_name, args.network_interface_name):
-                    print("REQUESTED NETWORK INTERFACE {} ALREADY EXISTS.".format(args.network_interface_name))
+                    logging.info("REQUESTED NETWORK INTERFACE {} ALREADY EXISTS.".format(args.network_interface_name))
                     network_interface_id = AzureMeta().get_network_interface(args.resource_group_name,
                                                                              args.network_interface_name).id
                 else:
-                    print("Creating Network Interface {}".format(args.network_interface_name))
+                    logging.info("Creating Network Interface {}".format(args.network_interface_name))
                     network_interface_id = AzureActions().create_network_if(args.resource_group_name, args.vpc_name,
                                                                             args.subnet_name,
                                                                             args.network_interface_name, args.region,
@@ -86,7 +87,7 @@ if __name__ == "__main__":
                 if disk:
                     create_option = 'attach'
                     disk_id = disk.id
-                print("Creating instance {}".format(args.instance_name))
+                logging.info("Creating instance {}".format(args.instance_name))
                 AzureActions().create_instance(args.region, args.instance_size, args.service_base_name,
                                                args.instance_name, args.datalab_ssh_user_name, args.public_key,
                                                network_interface_id, args.resource_group_name, args.primary_disk_size,
@@ -95,7 +96,7 @@ if __name__ == "__main__":
                                                create_option, disk_id, args.instance_storage_account_type,
                                                args.image_type)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
     else:
         parser.print_help()
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py
index 294d7a8..a80c43e 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_notebook_image.py
@@ -24,6 +24,7 @@
 import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
+from datalab.logger import logging
 import json
 import os
 import sys
@@ -76,7 +77,7 @@ if __name__ == "__main__":
 
         image = AzureMeta.get_image(image_conf['resource_group_name'], image_conf['full_image_name'])
         if image == '':
-            print('Creating image from existing notebook.')
+            logging.info('Creating image from existing notebook.')
             datalab.actions_lib.prepare_vm_for_image(True, image_conf['datalab_ssh_user'], instance_hostname,
                                                      keyfile_name)
             AzureActions.create_image_from_instance(image_conf['resource_group_name'],
@@ -84,7 +85,7 @@ if __name__ == "__main__":
                                                     os.environ['azure_region'],
                                                     image_conf['full_image_name'],
                                                     json.dumps(image_conf['tags']))
-            print("Image was successfully created.")
+            logging.info("Image was successfully created.")
             try:
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
@@ -103,7 +104,7 @@ if __name__ == "__main__":
                     .format(instance_hostname, image_conf['instance_name'], keyfile_name,
                             json.dumps(additional_config), image_conf['datalab_ssh_user'])
                 subprocess.run("~/scripts/{}.py {}".format('common_configure_proxy', params), shell=True, check=True)
-                print("Image was successfully created. It's name is {}".format(image_conf['full_image_name']))
+                logging.info("Image was successfully created. It's name is {}".format(image_conf['full_image_name']))
             except Exception as err:
                 AzureActions.remove_instance(image_conf['resource_group_name'], image_conf['instance_name'])
                 datalab.fab.append_result("Failed to create instance from image.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_security_group.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_security_group.py
index 05b6a8b..c206a76 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_security_group.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_security_group.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--resource_group_name', type=str, default='')
@@ -39,17 +40,17 @@ args = parser.parse_args()
 if __name__ == "__main__":
     try:
         if AzureMeta().get_security_group(args.resource_group_name, args.security_group_name):
-            print("REQUESTED SECURITY GROUP {} ALREADY EXISTS. Updating rules".format(args.security_group_name))
+            logging.info("REQUESTED SECURITY GROUP {} ALREADY EXISTS. Updating rules".format(args.security_group_name))
             security_group = AzureActions().create_security_group(args.resource_group_name, args.security_group_name,
                                                                   args.region, json.loads(args.tags),
                                                                   json.loads(args.list_rules), True)
         else:
-            print("Creating security group {}.".format(args.security_group_name))
+            logging.info("Creating security group {}.".format(args.security_group_name))
             security_group = AzureActions().create_security_group(args.resource_group_name, args.security_group_name,
                                                                   args.region, json.loads(args.tags),
                                                                   json.loads(args.list_rules))
-            print("SECURITY GROUP {} has been created".format(args.security_group_name))
+            logging.info("SECURITY GROUP {} has been created".format(args.security_group_name))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_storage_account.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_storage_account.py
index 04f47bd..17676b0 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_storage_account.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_storage_account.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--container_name', type=str, default='')
@@ -42,20 +43,20 @@ if __name__ == "__main__":
         for storage_account in AzureMeta().list_storage_accounts(args.resource_group_name):
             if account_tags["Name"] == storage_account.tags["Name"]:
                 check_account = True
-                print("REQUESTED STORAGE ACCOUNT {} ALREADY EXISTS".format(storage_account.name))
+                logging.info("REQUESTED STORAGE ACCOUNT {} ALREADY EXISTS".format(storage_account.name))
         if not check_account:
             account_name = id_generator().lower()
             check = AzureMeta().check_account_availability(account_name)
             if check.name_available:
-                print("Creating storage account {}.".format(account_name))
+                logging.info("Creating storage account {}.".format(account_name))
                 storage_account = AzureActions().create_storage_account(args.resource_group_name, account_name,
                                                                         args.region, account_tags)
                 blob_container = AzureActions().create_blob_container(args.resource_group_name, account_name,
                                                                       args.container_name)
-                print("STORAGE ACCOUNT {} has been created".format(account_name))
-                print("CONTAINER {} has been created".format(args.container_name))
+                logging.info("STORAGE ACCOUNT {} has been created".format(account_name))
+                logging.info("CONTAINER {} has been created".format(args.container_name))
             else:
-                print("STORAGE ACCOUNT with name {0} could not be created. {1}".format(account_name, check.message))
+                logging.info("STORAGE ACCOUNT with name {0} could not be created. {1}".format(account_name, check.message))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_create_subnet.py b/infrastructure-provisioning/src/general/scripts/azure/common_create_subnet.py
index 6345565..e774714 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_create_subnet.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_create_subnet.py
@@ -26,6 +26,7 @@ import ipaddress
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--resource_group_name', type=str, default='')
@@ -64,14 +65,14 @@ if __name__ == "__main__":
             datalab_subnet_cidr = '{0}/{1}'.format(ipaddress.ip_address(last_ip + 1), args.prefix)
         if args.subnet_name != '':
             if AzureMeta().get_subnet(args.resource_group_name, args.vpc_name, args.subnet_name):
-                print("REQUESTED SUBNET {} ALREADY EXISTS".format(args.subnet_name))
+                logging.info("REQUESTED SUBNET {} ALREADY EXISTS".format(args.subnet_name))
             else:
-                print("Creating Subnet {}".format(args.subnet_name))
+                logging.info("Creating Subnet {}".format(args.subnet_name))
                 AzureActions().create_subnet(args.resource_group_name, args.vpc_name, args.subnet_name,
                                              datalab_subnet_cidr)
         else:
-            print("Subnet name can't be empty")
+            logging.info("Subnet name can't be empty")
             sys.exit(1)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
index eb707fe..58e18bb 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
@@ -25,6 +25,7 @@ import argparse
 import os
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -49,8 +50,8 @@ if __name__ == "__main__":
     if AzureActions().download_from_container(resource_group_name, ssn_storage_account_name, container_name, gitlab_certfile):
         conn.put(gitlab_certfile, gitlab_certfile)
         conn.sudo('chown root:root {}'.format(gitlab_certfile))
-        print('{} has been downloaded'.format(gitlab_certfile))
+        logging.info('{} has been downloaded'.format(gitlab_certfile))
     else:
-        print('There is no {} to download'.format(gitlab_certfile))
+        logging.info('There is no {} to download'.format(gitlab_certfile))
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
index 0e4f206..b650ad0 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -41,18 +41,11 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         # generating variables dictionary
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         notebook_config = dict()
         if 'exploratory_name' in os.environ:
             notebook_config['exploratory_name'] = os.environ['exploratory_name']
@@ -98,7 +91,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         params = "--cluster_name {0} --spark_version {1} --hadoop_version {2} --os_user {3} --spark_master {4}" \
                  " --keyfile {5} --notebook_ip {6} --datalake_enabled {7} --spark_master_ip {8}".\
             format(notebook_config['cluster_name'], os.environ['notebook_spark_version'],
@@ -117,7 +109,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
-        print('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
         params = "--hostname {0} " \
                  "--keyfile {1} " \
                  "--os_user {2} " \
@@ -140,7 +131,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Configure notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
index d37735a..27dfcb8 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_prepare_notebook.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,14 +34,6 @@ from Crypto.PublicKey import RSA
 from fabric import *
 
 if __name__ == "__main__":
-    instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
@@ -54,7 +46,7 @@ if __name__ == "__main__":
         notebook_config['endpoint_tag'] = notebook_config['endpoint_name']
         notebook_config['application'] = os.environ['application'].lower()
 
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         try:
             notebook_config['exploratory_name'] = os.environ['exploratory_name']
         except:
@@ -113,16 +105,16 @@ if __name__ == "__main__":
                 notebook_config['endpoint_name'],
                 notebook_config['application'])
 
-        print('Searching pre-configured images')
+        logging.info('Searching pre-configured images')
         notebook_config['image_name'] = os.environ['azure_{}_image_name'.format(os.environ['conf_os_family'])]
         if os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['application'] == 'deeplearning':
             if AzureMeta.get_image(notebook_config['resource_group_name'], notebook_config['expected_image_name']):
                 notebook_config['image_name'] = notebook_config['expected_image_name']
                 notebook_config['image_type'] = 'pre-configured'
-                print('Pre-configured image found. Using: {}'.format(notebook_config['image_name']))
+                logging.info('Pre-configured image found. Using: {}'.format(notebook_config['image_name']))
             else:
                 notebook_config['image_name'] = os.environ['notebook_image_name']
-                print('Pre-configured deeplearning image found. Using: {}'.format(notebook_config['image_name']))
+                logging.info('Pre-configured deeplearning image found. Using: {}'.format(notebook_config['image_name']))
         else:
             notebook_config['notebook_image_name'] = (lambda x: '{0}-{1}-{2}-{3}-{4}'.format(
                 notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_name'],
@@ -131,12 +123,12 @@ if __name__ == "__main__":
             if AzureMeta.get_image(notebook_config['resource_group_name'], notebook_config['notebook_image_name']):
                 notebook_config['image_name'] = notebook_config['notebook_image_name']
                 notebook_config['image_type'] = 'pre-configured'
-                print('Pre-configured image found. Using: {}'.format(notebook_config['notebook_image_name']))
+                logging.info('Pre-configured image found. Using: {}'.format(notebook_config['notebook_image_name']))
             else:
                 os.environ['notebook_image_name'] = notebook_config['image_name']
-                print('No pre-configured image found. Using default one: {}'.format(notebook_config['image_name']))
+                logging.info('No pre-configured image found. Using default one: {}'.format(notebook_config['image_name']))
     except Exception as err:
-        print("Failed to generate variables dictionary.")
+        logging.error("Failed to generate variables dictionary.")
         datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
 
@@ -148,7 +140,6 @@ if __name__ == "__main__":
 
         if edge_status != 'running':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             ssn_hostname = AzureMeta.get_private_ip_address(notebook_config['resource_group_name'],
                                                               os.environ['conf_service_base_name'] + '-ssn')
             datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
@@ -167,7 +158,6 @@ if __name__ == "__main__":
     # launching instance for notebook server
     try:
         logging.info('[CREATE NOTEBOOK INSTANCE]')
-        print('[CREATE NOTEBOOK INSTANCE]')
         params = "--instance_name {} --instance_size {} --region {} --vpc_name {} --network_interface_name {} \
             --security_group_name {} --subnet_name {} --service_base_name {} --resource_group_name {} \
             --datalab_ssh_user_name {} --public_ip_name {} --public_key '''{}''' --primary_disk_size {} \
@@ -190,6 +180,6 @@ if __name__ == "__main__":
         try:
             AzureActions.remove_instance(notebook_config['resource_group_name'], notebook_config['instance_name'])
         except:
-            print("The instance hasn't been created.")
+            logging.error("The instance hasn't been created.")
         datalab.fab.append_result("Failed to create instance.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py b/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py
index 4aa8d12..128329a 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py
@@ -25,6 +25,7 @@ import argparse
 import sys
 from datalab.actions_lib import *
 from datalab.fab import find_cluster_kernels
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -37,7 +38,7 @@ args = parser.parse_args()
 
 
 if __name__ == "__main__":
-    print('Configure connections')
+    logging.info('Configure connections')
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
@@ -47,6 +48,6 @@ if __name__ == "__main__":
             AzureActions().remove_dataengine_kernels(args.resource_group_name, args.notebook_name,
                                                      args.os_user, args.keyfile, cluster)
     except Exception as err:
-        print('Failed to remove cluster kernels.', str(err))
+        logging.error('Failed to remove cluster kernels.', str(err))
         sys.exit(1)
     conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py b/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py
index 73dff2f..09c1bd4 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_reupload_key.py
@@ -27,6 +27,7 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -47,5 +48,5 @@ if __name__ == "__main__":
         try:
             subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
index af27198..798e454 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,16 +33,10 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = os.environ['conf_service_base_name']
     notebook_config['resource_group_name'] = os.environ['azure_resource_group_name']
@@ -50,11 +44,11 @@ if __name__ == "__main__":
 
     try:
         logging.info('[START NOTEBOOK]')
-        print('[START NOTEBOOK]')
+        logging.info('[START NOTEBOOK]')
         try:
-            print("Starting notebook")
+            logging.info("Starting notebook")
             AzureActions.start_instance(notebook_config['resource_group_name'], notebook_config['notebook_name'])
-            print("Instance {} has been started".format(notebook_config['notebook_name']))
+            logging.info("Instance {} has been started".format(notebook_config['notebook_name']))
         except:
             traceback.print_exc()
             raise Exception
@@ -64,7 +58,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SETUP USER GIT CREDENTIALS]')
-        print('[SETUP USER GIT CREDENTIALS]')
         notebook_config['notebook_ip'] = AzureMeta.get_private_ip_address(
             notebook_config['resource_group_name'], notebook_config['notebook_name'])
         notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
@@ -82,7 +75,6 @@ if __name__ == "__main__":
     if os.environ['azure_datalake_enable'] == 'true':
         try:
             logging.info('[UPDATE STORAGE CREDENTIALS]')
-            print('[UPDATE STORAGE CREDENTIALS]')
             notebook_config['notebook_ip'] = AzureMeta.get_private_ip_address(
                 notebook_config['resource_group_name'], notebook_config['notebook_name'])
             global conn
@@ -103,7 +95,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATE LAST ACTIVITY TIME]')
-        print('[UPDATE LAST ACTIVITY TIME]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
@@ -118,15 +109,14 @@ if __name__ == "__main__":
     try:
         ip_address = AzureMeta.get_private_ip_address(notebook_config['resource_group_name'],
                                                       notebook_config['notebook_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['notebook_name']))
-        print("Private IP: {}".format(ip_address))
+        logging.info("Instance name: {}".format(notebook_config['notebook_name']))
+        logging.info("Private IP: {}".format(ip_address))
         with open("/root/result.json", 'w') as result:
             res = {"ip": ip_address,
                    "notebook_name": notebook_config['notebook_name'],
                    "Action": "Start up notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_stop_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_stop_notebook.py
index 82a9533..3154875 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_stop_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_stop_notebook.py
@@ -25,13 +25,13 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 
 def stop_notebook(resource_group_name, notebook_name):
-    print("Stopping data engine cluster")
+    logging.info("Stopping data engine cluster")
     cluster_list = []
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
@@ -40,35 +40,28 @@ def stop_notebook(resource_group_name, notebook_name):
                     if 'master' == vm.tags["Type"]:
                         cluster_list.append(vm.tags["Name"])
                     AzureActions.stop_instance(resource_group_name, vm.name)
-                    print("Instance {} has been stopped".format(vm.name))
+                    logging.info("Instance {} has been stopped".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to stop clusters", str(err))
         sys.exit(1)
 
-    print("Stopping notebook")
+    logging.info("Stopping notebook")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "Name" in vm.tags:
                 if notebook_name == vm.tags["Name"]:
                     AzureActions.stop_instance(resource_group_name, vm.name)
-                    print("Instance {} has been stopped".format(vm.name))
+                    logging.info("Instance {} has been stopped".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to stop instance", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     if 'exploratory_name' in os.environ:
         notebook_config['exploratory_name'] = os.environ['exploratory_name']
@@ -82,7 +75,6 @@ if __name__ == "__main__":
     notebook_config['notebook_name'] = os.environ['notebook_instance_name']
 
     logging.info('[STOP NOTEBOOK]')
-    print('[STOP NOTEBOOK]')
     try:
         stop_notebook(notebook_config['resource_group_name'], notebook_config['notebook_name'])
     except Exception as err:
@@ -93,7 +85,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Stop notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_terminate_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_terminate_notebook.py
index d2b8216..77ef93f 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_terminate_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_terminate_notebook.py
@@ -25,47 +25,41 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_nb(resource_group_name, notebook_name):
-    print("Terminating data engine cluster")
+    logging.info("Terminating data engine cluster")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "notebook_name" in vm.tags:
                 if notebook_name == vm.tags['notebook_name']:
                     AzureActions.remove_instance(resource_group_name, vm.name)
-                    print("Instance {} has been terminated".format(vm.name))
+                    logging.info("Instance {} has been terminated".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to terminate clusters", str(err))
         sys.exit(1)
 
-    print("Terminating notebook")
+    logging.info("Terminating notebook")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "Name" in vm.tags:
                 if notebook_name == vm.tags["Name"]:
                     AzureActions.remove_instance(resource_group_name, vm.name)
-                    print("Instance {} has been terminated".format(vm.name))
+                    logging.info("Instance {} has been terminated".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to terminate instance", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     if 'exploratory_name' in os.environ:
         notebook_config['exploratory_name'] = os.environ['exploratory_name']
@@ -80,7 +74,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE NOTEBOOK]')
-        print('[TERMINATE NOTEBOOK]')
         try:
             terminate_nb(notebook_config['resource_group_name'], notebook_config['notebook_name'])
         except Exception as err:
@@ -94,7 +87,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Terminate notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
index 27bb216..3d25aea 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import multiprocessing
 import os
 import sys
@@ -40,7 +40,6 @@ def configure_slave(slave_number, data_engine):
     slave_hostname = AzureMeta.get_private_ip_address(data_engine['resource_group_name'], slave_name)
     try:
         logging.info('[CREATING DATALAB SSH USER ON SLAVE NODE]')
-        print('[CREATING DATALAB SSH USER ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (slave_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem", initial_user,
              data_engine['datalab_ssh_user'], sudo_group)
@@ -56,7 +55,6 @@ def configure_slave(slave_number, data_engine):
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY ON SLAVE]')
         logging.info('[INSTALLING USERs KEY ON SLAVE]')
         additional_config = {"user_keyname": data_engine['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -75,7 +73,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CLEANING INSTANCE FOR SLAVE NODE]')
-        print('[CLEANING INSTANCE FOR SLAVE NODE]')
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
             .format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
@@ -90,7 +87,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CONFIGURE PROXY ON SLAVE NODE]')
-        print('[CONFIGURE PROXY ON ON SLAVE NODE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(slave_hostname, slave_name, keyfile_name, json.dumps(additional_config),
@@ -107,7 +103,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON SLAVE NODE]')
-        print('[INSTALLING PREREQUISITES ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_hostname)
@@ -123,7 +118,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
-        print('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --master_ip {} --node_type {}". \
             format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
@@ -149,17 +143,10 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
-
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         data_engine = dict()
         if 'exploratory_name' in os.environ:
             data_engine['exploratory_name'] = os.environ['exploratory_name']
@@ -230,8 +217,7 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        logging.info('[CREATING DATA ATA LAB SSH USER ON MASTER NODE]')
-        print('[CREATING DATALAB SSH USER ON MASTER NODE]')
+        logging.info('[CREATING DATALAB SSH USER ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (master_node_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem", initial_user,
              data_engine['datalab_ssh_user'], sudo_group)
@@ -247,7 +233,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY ON MASTER]')
         logging.info('[INSTALLING USERs KEY ON MASTER]')
         additional_config = {"user_keyname": data_engine['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -267,7 +252,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CLEANING INSTANCE FOR MASTER NODE]')
-        print('[CLEANING INSTANCE FOR MASTER NODE]')
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
             .format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
@@ -282,7 +266,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE PROXY ON MASTER NODE]')
-        print('[CONFIGURE PROXY ON ON MASTER NODE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(master_node_hostname, data_engine['master_node_name'], keyfile_name, json.dumps(additional_config),
@@ -299,7 +282,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON MASTER NODE]')
-        print('[INSTALLING PREREQUISITES ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_hostname)
@@ -315,7 +297,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE MASTER NODE]')
-        print('[CONFIGURE MASTER NODE]')
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --master_ip {} --node_type {}".\
             format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
@@ -349,7 +330,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         notebook_instance_ip = AzureMeta.get_private_ip_address(data_engine['resource_group_name'],
                                                                 data_engine['notebook_name'])
@@ -391,13 +371,12 @@ if __name__ == "__main__":
         spark_master_access_url = "https://" + edge_instance_hostname + "/{}/".format(
             data_engine['exploratory_name'] + '_' + data_engine['computational_name'])
         logging.info('[SUMMARY]')
-        print('[SUMMARY]')
-        print("Service base name: {}".format(data_engine['service_base_name']))
-        print("Region: {}".format(data_engine['region']))
-        print("Cluster name: {}".format(data_engine['cluster_name']))
-        print("Master node shape: {}".format(data_engine['master_size']))
-        print("Slave node shape: {}".format(data_engine['slave_size']))
-        print("Instance count: {}".format(str(data_engine['instance_count'])))
+        logging.info("Service base name: {}".format(data_engine['service_base_name']))
+        logging.info("Region: {}".format(data_engine['region']))
+        logging.info("Cluster name: {}".format(data_engine['cluster_name']))
+        logging.info("Master node shape: {}".format(data_engine['master_size']))
+        logging.info("Slave node shape: {}".format(data_engine['slave_size']))
+        logging.info("Instance count: {}".format(str(data_engine['instance_count'])))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": data_engine['cluster_name'],
                    "instance_id": data_engine['master_node_name'],
@@ -410,7 +389,7 @@ if __name__ == "__main__":
                        # "url": spark_master_url}
                    ]
                    }
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py
index 995b7d0..06dd514 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from Crypto.PublicKey import RSA
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -49,7 +43,7 @@ if __name__ == "__main__":
         data_engine['endpoint_name'] = os.environ['endpoint_name']
         data_engine['project_tag'] = data_engine['project_name']
         data_engine['endpoint_tag'] = data_engine['endpoint_name']
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         if 'exploratory_name' in os.environ:
             data_engine['exploratory_name'] = os.environ['exploratory_name']
         else:
@@ -119,15 +113,15 @@ if __name__ == "__main__":
         data_engine['notebook_image_name'] = (lambda x: os.environ['notebook_image_name'] if x != 'None'
                     else data_engine['expected_image_name'])(str(os.environ.get('notebook_image_name')))
 
-        print('Searching pre-configured images')
+        logging.info('Searching pre-configured images')
         if AzureMeta.get_image(data_engine['resource_group_name'], data_engine['notebook_image_name']) and \
                         os.environ['application'] in os.environ['dataengine_image_notebooks'].split(','):
             data_engine['image_name'] = data_engine['notebook_image_name']
             data_engine['image_type'] = 'pre-configured'
-            print('Pre-configured image found. Using: {}'.format(data_engine['notebook_image_name']))
+            logging.info('Pre-configured image found. Using: {}'.format(data_engine['notebook_image_name']))
         else:
             data_engine['image_name'] = os.environ['azure_{}_image_name'.format(os.environ['conf_os_family'])]
-            print('No pre-configured image found. Using default one: {}'.format(data_engine['image_name']))
+            logging.info('No pre-configured image found. Using default one: {}'.format(data_engine['image_name']))
     except Exception as err:
         datalab.fab.append_result("Failed to generate variables dictionary", str(err))
         sys.exit(1)
@@ -139,7 +133,6 @@ if __name__ == "__main__":
                                                                               data_engine['endpoint_name']))
         if edge_status != 'running':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             ssn_hostname = AzureMeta.get_private_ip_address(data_engine['resource_group_name'],
                                                             data_engine['service_base_name'] + '-ssn')
             datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
@@ -160,7 +153,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE MASTER NODE]')
-        print('[CREATE MASTER NODE]')
 
         if 'NC' in data_engine['master_size']:
             data_engine['instance_storage_account_type'] = 'Standard_LRS'
@@ -186,14 +178,13 @@ if __name__ == "__main__":
         try:
             AzureActions.remove_instance(data_engine['resource_group_name'], data_engine['master_node_name'])
         except:
-            print("The instance hasn't been created.")
+            logging.info("The instance hasn't been created.")
         datalab.fab.append_result("Failed to create master instance.", str(err))
         sys.exit(1)
 
     try:
         for i in range(data_engine['instance_count'] - 1):
             logging.info('[CREATE SLAVE NODE {}]'.format(i + 1))
-            print('[CREATE SLAVE NODE {}]'.format(i + 1))
 
             slave_name = data_engine['slave_node_name'] + '{}'.format(i + 1)
             slave_nif_name = slave_name + '-nif'
@@ -222,7 +213,7 @@ if __name__ == "__main__":
             try:
                 AzureActions.remove_instance(data_engine['resource_group_name'], slave_name)
             except:
-                print("The slave instance {} hasn't been created.".format(slave_name))
+                logging.info("The slave instance {} hasn't been created.".format(slave_name))
         AzureActions.remove_instance(data_engine['resource_group_name'], data_engine['master_node_name'])
         datalab.fab.append_result("Failed to create slave instances.", str(err))
         sys.exit(1)
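
The per-script logging.basicConfig blocks removed above are replaced by a single import, so the log format and level are defined in one place. The datalab/logger module itself is not part of this diff; a minimal sketch of what it could look like, assuming it simply configures the root logger once at import time and re-exports the stdlib module:

    # datalab/logger.py -- hypothetical sketch; only the import path
    # "from datalab.logger import logging" is confirmed by this commit.
    import logging
    import sys

    # Runs once, on first import, for every script in the process.
    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                        level=logging.INFO,
                        stream=sys.stdout)

Under that assumption, "from datalab.logger import logging" hands back the already-configured stdlib module, so every later logging.info() call shares the same handlers.
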
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
index 2f100fd..5b7938b 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,29 +34,23 @@ from fabric import *
 
 
 def start_data_engine(resource_group_name, cluster_name):
-    print("Starting data engine cluster")
+    logging.info("Starting data engine cluster")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "Name" in vm.tags:
                 if cluster_name == vm.tags["Name"]:
                     AzureActions.start_instance(resource_group_name, vm.name)
-                    print("Instance {} has been started".format(vm.name))
+                    logging.info("Instance {} has been started".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to start dataengine", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name']
@@ -77,7 +71,6 @@ if __name__ == "__main__":
                                                           data_engine['computational_name'])
     try:
         logging.info('[STARTING DATA ENGINE]')
-        print('[STARTING DATA ENGINE]')
         try:
             start_data_engine(data_engine['resource_group_name'], data_engine['cluster_name'])
         except Exception as err:
@@ -89,7 +82,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATE LAST ACTIVITY TIME]')
-        print('[UPDATE LAST ACTIVITY TIME]')
         data_engine['computational_id'] = data_engine['cluster_name'] + '-m'
         data_engine['notebook_ip'] = AzureMeta.get_private_ip_address(data_engine['resource_group_name'],
                                                                       os.environ['notebook_instance_name'])
@@ -112,7 +104,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Start Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_stop.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_stop.py
index 9db2c9b..62ecad7 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_stop.py
@@ -25,36 +25,30 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def stop_data_engine(resource_group_name, cluster_name):
-    print("Stopping data engine cluster")
+    logging.info("Stopping data engine cluster")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "Name" in vm.tags:
                 if cluster_name == vm.tags["Name"]:
                     AzureActions.stop_instance(resource_group_name, vm.name)
-                    print("Instance {} has been stopped".format(vm.name))
+                    logging.info("Instance {} has been stopped".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to stop dataengine", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name']
@@ -75,7 +69,6 @@ if __name__ == "__main__":
                                                           data_engine['computational_name'])
     try:
         logging.info('[STOPPING DATA ENGINE]')
-        print('[STOPPING DATA ENGINE]')
         try:
             stop_data_engine(data_engine['resource_group_name'], data_engine['cluster_name'])
         except Exception as err:
@@ -89,7 +82,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Stop Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_terminate.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_terminate.py
index 7cec539..95ca3b4 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_terminate.py
@@ -25,25 +25,25 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_data_engine(resource_group_name, notebook_name, os_user, key_path, cluster_name):
-    print("Terminating data engine cluster")
+    logging.info("Terminating data engine cluster")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "Name" in vm.tags:
                 if cluster_name == vm.tags["Name"]:
                     AzureActions.remove_instance(resource_group_name, vm.name)
-                    print("Instance {} has been terminated".format(vm.name))
+                    logging.info("Instance {} has been terminated".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataengine", str(err))
         sys.exit(1)
 
-    print("Removing Data Engine kernels from notebook")
+    logging.info("Removing Data Engine kernels from notebook")
     try:
         AzureActions.remove_dataengine_kernels(resource_group_name, notebook_name, os_user, key_path, cluster_name)
     except Exception as err:
@@ -52,16 +52,10 @@ def terminate_data_engine(resource_group_name, notebook_name, os_user, key_path,
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name']
@@ -86,7 +80,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE DATA ENGINE]')
-        print('[TERMINATE DATA ENGINE]')
         try:
             terminate_data_engine(data_engine['resource_group_name'], data_engine['notebook_name'],
                                   os.environ['conf_os_user'], data_engine['key_path'], data_engine['cluster_name'])
@@ -101,7 +94,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Terminate Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
index c228c97..ae83807 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,13 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -134,8 +127,7 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        logging.info('[CREATING DataLab SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
+        logging.info('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -153,7 +145,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON DEEP LEARNING INSTANCE]')
-        print('[CONFIGURE PROXY ON DEEP LEARNING  INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -169,7 +160,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -188,7 +178,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}".format(
             instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
             edge_instance_private_hostname)
@@ -204,7 +193,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
-        print('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {0} --keyfile {1} " \
                  "--os_user {2} --jupyter_version {3} " \
                  "--scala_version {4} --spark_version {5} " \
@@ -228,7 +216,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -244,7 +231,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --resource_group_name {} --notebook_name {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -261,10 +247,10 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'], notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -272,7 +258,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -296,7 +282,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -336,18 +321,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         tensorboard_access_url = "https://" + edge_instance_hostname + "/{}-tensor/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Tensor Board URL: {}".format(tensorboard_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Tensor Board URL: {}".format(tensorboard_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py b/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py
index 46a9607..adc7c35 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,13 +34,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     def clear_resources():
         AzureActions.remove_instance(edge_conf['resource_group_name'], edge_conf['instance_name'])
         AzureActions.remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
@@ -61,7 +54,7 @@ if __name__ == "__main__":
                     AzureActions.remove_datalake_directory(datalake.name, edge_conf['datalake_user_directory_name'])
 
     try:
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
         edge_conf = dict()
@@ -151,7 +144,6 @@ if __name__ == "__main__":
             edge_conf['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             edge_conf['instance_hostname'], os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             edge_conf['initial_user'], edge_conf['datalab_ssh_user'], edge_conf['sudo_group'])
@@ -167,7 +159,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING PREREQUISITES]')
         logging.info('[INSTALLING PREREQUISITES]')
         params = "--hostname {} --keyfile {} --user {} --region {}".format(
             edge_conf['instance_hostname'], edge_conf['keyfile_name'], edge_conf['datalab_ssh_user'],
@@ -183,7 +174,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING HTTP PROXY]')
         logging.info('[INSTALLING HTTP PROXY]')
         additional_config = {"exploratory_subnet": edge_conf['private_subnet_cidr'],
                              "template_file": "/root/templates/squid.conf",
@@ -209,7 +199,6 @@ if __name__ == "__main__":
 
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": edge_conf['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -227,7 +216,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING NGINX REVERSE PROXY]')
         logging.info('[INSTALLING NGINX REVERSE PROXY]')
         edge_conf['keycloak_client_secret'] = str(uuid.uuid4())
         params = "--hostname {} --keyfile {} --user {} --keycloak_client_id {} --keycloak_client_secret {} " \
@@ -270,25 +258,24 @@ if __name__ == "__main__":
             if edge_conf['user_storage_account_name'] == storage_account.tags["Name"]:
                 edge_conf['user_storage_account_name'] = storage_account.name
 
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(edge_conf['instance_name']))
-        print("Hostname: {}".format(edge_conf['instance_dns_name']))
-        print("Public IP: {}".format(edge_conf['edge_public_ip']))
-        print("Private IP: {}".format(edge_conf['edge_private_ip']))
-        print("Key name: {}".format(edge_conf['key_name']))
-        print("User storage account name: {}".format(edge_conf['user_storage_account_name']))
-        print("User container name: {}".format(edge_conf['user_container_name']))
+        logging.info("Instance name: {}".format(edge_conf['instance_name']))
+        logging.info("Hostname: {}".format(edge_conf['instance_dns_name']))
+        logging.info("Public IP: {}".format(edge_conf['edge_public_ip']))
+        logging.info("Private IP: {}".format(edge_conf['edge_private_ip']))
+        logging.info("Key name: {}".format(edge_conf['key_name']))
+        logging.info("User storage account name: {}".format(edge_conf['user_storage_account_name']))
+        logging.info("User container name: {}".format(edge_conf['user_container_name']))
         if os.environ['azure_datalake_enable'] == 'true':
             for datalake in AzureMeta.list_datalakes(edge_conf['resource_group_name']):
                 if edge_conf['datalake_store_name'] == datalake.tags["Name"]:
                     edge_conf['datalake_id'] = datalake.name
-            print("Data Lake name: {}".format(edge_conf['datalake_id']))
-            print("Data Lake tag name: {}".format(edge_conf['datalake_store_name']))
-            print("Data Lake Store user directory name: {}".format(edge_conf['datalake_user_directory_name']))
-        print("Notebook SG: {}".format(edge_conf['notebook_security_group_name']))
-        print("Edge SG: {}".format(edge_conf['edge_security_group_name']))
-        print("Notebook subnet: {}".format(edge_conf['private_subnet_cidr']))
+            logging.info("Data Lake name: {}".format(edge_conf['datalake_id']))
+            logging.info("Data Lake tag name: {}".format(edge_conf['datalake_store_name']))
+            logging.info("Data Lake Store user directory name: {}".format(edge_conf['datalake_user_directory_name']))
+        logging.info("Notebook SG: {}".format(edge_conf['notebook_security_group_name']))
+        logging.info("Edge SG: {}".format(edge_conf['edge_security_group_name']))
+        logging.info("Notebook subnet: {}".format(edge_conf['private_subnet_cidr']))
         with open("/root/result.json", 'w') as result:
             if os.environ['azure_datalake_enable'] == 'false':
                 res = {"hostname": edge_conf['instance_dns_name'],
@@ -334,7 +321,7 @@ if __name__ == "__main__":
                        "project_name": edge_conf['project_name'],
                        "@class": "com.epam.datalab.dto.azure.edge.EdgeInfoAzure",
                        "Action": "Create new EDGE server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
index 528c8ab..1e94746 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
@@ -30,17 +30,11 @@ from Crypto.PublicKey import RSA
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         edge_conf = dict()
         edge_conf['service_base_name'] = os.environ['conf_service_base_name']
         edge_conf['resource_group_name'] = os.environ['azure_resource_group_name']
@@ -95,19 +89,18 @@ if __name__ == "__main__":
         # FUSE in case of absence of user's key
         fname = "{}{}.pub".format(os.environ['conf_key_dir'], edge_conf['user_keyname'])
         if not os.path.isfile(fname):
-            print("USERs PUBLIC KEY DOES NOT EXIST in {}".format(fname))
+            logging.info("USERs PUBLIC KEY DOES NOT EXIST in {}".format(fname))
             sys.exit(1)
 
-        print("Will create exploratory environment with edge node as access point as following: {}".format(json.dumps(edge_conf, sort_keys=True, indent=4, separators=(',', ': '))))
+        logging.info("Will create exploratory environment with edge node as access point as following: {}".format(json.dumps(edge_conf, sort_keys=True, indent=4, separators=(',', ': '))))
         logging.info(json.dumps(edge_conf))
     except Exception as err:
-        print("Failed to generate variables dictionary.")
+        logging.error("Failed to generate variables dictionary.")
         append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
 
     try:
         logging.info('[CREATE SUBNET]')
-        print('[CREATE SUBNET]')
         params = "--resource_group_name {} --vpc_name {} --region {} --vpc_cidr {} --subnet_name {} --prefix {}".\
             format(edge_conf['resource_group_name'], edge_conf['vpc_name'], edge_conf['region'], edge_conf['vpc_cidr'],
                    edge_conf['private_subnet_name'], edge_conf['private_subnet_prefix'])
@@ -117,22 +110,21 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         try:
             AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                          edge_conf['private_subnet_name'])
         except:
-            print("Subnet hasn't been created.")
+            logging.info("Subnet hasn't been created.")
         append_result("Failed to create subnet.", str(err))
         sys.exit(1)
 
     edge_conf['private_subnet_cidr'] = AzureMeta().get_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                                               edge_conf['private_subnet_name']).address_prefix
-    print('NEW SUBNET CIDR CREATED: {}'.format(edge_conf['private_subnet_cidr']))
+    logging.info('NEW SUBNET CIDR CREATED: {}'.format(edge_conf['private_subnet_cidr']))
 
     try:
         logging.info('[CREATE SECURITY GROUP FOR EDGE NODE]')
-        print('[CREATE SECURITY GROUP FOR EDGE]')
         edge_list_rules = [
             {
                 "name": "in-1",
@@ -400,7 +392,7 @@ if __name__ == "__main__":
                 AzureActions().remove_security_group(edge_conf['resource_group_name'],
                                                      edge_conf['edge_security_group_name'])
             except:
-                print("Edge Security group hasn't been created.")
+                logging.info("Edge Security group hasn't been created.")
             traceback.print_exc()
             append_result("Failed creating security group for edge node.", str(err))
             raise Exception
@@ -409,7 +401,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
-        print('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
         notebook_list_rules = [
             {
                 "name": "in-1",
@@ -500,7 +491,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                      edge_conf['private_subnet_name'])
         AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name'])
@@ -508,11 +499,10 @@ if __name__ == "__main__":
             AzureActions().remove_security_group(edge_conf['resource_group_name'],
                                                  edge_conf['notebook_security_group_name'])
         except:
-            print("Notebook Security group hasn't been created.")
+            logging.info("Notebook Security group hasn't been created.")
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR MASTER NODE]')
-    print("[CREATING SECURITY GROUPS FOR MASTER NODE]")
     try:
         cluster_list_rules = [
             {
@@ -606,7 +596,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                      edge_conf['private_subnet_name'])
         AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name'])
@@ -616,12 +606,11 @@ if __name__ == "__main__":
             AzureActions().remove_security_group(edge_conf['resource_group_name'],
                                                  edge_conf['master_security_group_name'])
         except:
-            print("Master Security group hasn't been created.")
+            logging.info("Master Security group hasn't been created.")
         append_result("Failed to create Security groups. Exception:" + str(err))
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR SLAVE NODES]')
-    print("[CREATING SECURITY GROUPS FOR SLAVE NODES]")
     try:
         params = "--resource_group_name {} --security_group_name {} --region {} --tags '{}' --list_rules '{}'".format(
             edge_conf['resource_group_name'], edge_conf['slave_security_group_name'], edge_conf['region'],
@@ -632,7 +621,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                      edge_conf['private_subnet_name'])
         AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name'])
@@ -644,13 +633,12 @@ if __name__ == "__main__":
             AzureActions().remove_security_group(edge_conf['resource_group_name'],
                                                  edge_conf['slave_security_group_name'])
         except:
-            print("Slave Security group hasn't been created.")
+            logging.info("Slave Security group hasn't been created.")
         append_result("Failed to create Security groups. Exception:" + str(err))
         sys.exit(1)
 
     try:
         logging.info('[CREATE STORAGE ACCOUNT AND CONTAINERS]')
-        print('[CREATE STORAGE ACCOUNT AND CONTAINERS]')
 
         params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}". \
             format(edge_conf['edge_container_name'], json.dumps(edge_conf['storage_account_tags']),
@@ -661,7 +649,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to create storage account.", str(err))
         AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                      edge_conf['private_subnet_name'])
@@ -679,7 +667,6 @@ if __name__ == "__main__":
     if os.environ['azure_datalake_enable'] == 'true':
         try:
             logging.info('[CREATE DATA LAKE STORE DIRECTORY]')
-            print('[CREATE DATA LAKE STORE DIRECTORY]')
             params = "--resource_group_name {} --datalake_name {} --directory_name {} --ad_user {} --service_base_name {}". \
                 format(edge_conf['resource_group_name'], edge_conf['datalake_store_name'],
                        edge_conf['datalake_user_directory_name'], edge_conf['azure_ad_user_name'],
@@ -690,7 +677,7 @@ if __name__ == "__main__":
                 traceback.print_exc()
                 raise Exception
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             append_result("Failed to create Data Lake Store directory.", str(err))
             AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                          edge_conf['private_subnet_name'])
@@ -708,8 +695,8 @@ if __name__ == "__main__":
                     if edge_conf['datalake_store_name'] == datalake.tags["Name"]:
                         AzureActions().remove_datalake_directory(datalake.name, edge_conf['datalake_user_directory_name'])
             except Exception as err:
-                print('Error: {0}'.format(err))
-                print("Data Lake Store directory hasn't been created.")
+                logging.error('Error: {0}'.format(err))
+                logging.info("Data Lake Store directory hasn't been created.")
             sys.exit(1)
 
     if os.environ['conf_os_family'] == 'debian':
@@ -721,7 +708,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE EDGE INSTANCE]')
-        print('[CREATE EDGE INSTANCE]')
         params = "--instance_name {} --instance_size {} --region {} --vpc_name {} --network_interface_name {} \
             --security_group_name {} --subnet_name {} --service_base_name {} --resource_group_name {} \
             --datalab_ssh_user_name {} --public_ip_name {} --public_key '''{}''' --primary_disk_size {} \
@@ -738,11 +724,11 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         try:
             AzureActions().remove_instance(edge_conf['resource_group_name'], edge_conf['instance_name'])
         except:
-            print("The instance hasn't been created.")
+            logging.info("The instance hasn't been created.")
         AzureActions().remove_subnet(edge_conf['resource_group_name'], edge_conf['vpc_name'],
                                      edge_conf['private_subnet_name'])
         AzureActions().remove_security_group(edge_conf['resource_group_name'], edge_conf['edge_security_group_name'])
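
Where these scripts log a caught exception, logging.error (already used above for the variables-dictionary failure) or logging.exception keeps the severity visible in the shared log. A minimal sketch of the pattern, with a simulated failure standing in for a real provisioning step:

    from datalab.logger import logging  # assumed shared logger module

    try:
        raise RuntimeError("simulated provisioning failure")  # placeholder step
    except Exception as err:
        # Logs at ERROR level and appends the traceback, covering both the
        # 'Error: ...' message and the traceback.print_exc() call in one go.
        logging.exception("Step failed: %s", err)
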
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_start.py b/infrastructure-provisioning/src/general/scripts/azure/edge_start.py
index 2f9f2ba..05e9bd0 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_start.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_start.py
@@ -25,19 +25,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
     edge_conf = dict()
@@ -51,7 +44,6 @@ if __name__ == "__main__":
                                                                             os.environ['azure_region'])
 
     logging.info('[START EDGE]')
-    print('[START EDGE]')
     try:
         AzureActions.start_instance(edge_conf['resource_group_name'], edge_conf['instance_name'])
     except Exception as err:
@@ -63,19 +55,18 @@ if __name__ == "__main__":
                                                                      edge_conf['instance_name'])
         private_ip_address = AzureMeta.get_private_ip_address(edge_conf['resource_group_name'],
                                                               edge_conf['instance_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(edge_conf['instance_name']))
-        print("Hostname: {}".format(edge_conf['instance_dns_name']))
-        print("Public IP: {}".format(public_ip_address))
-        print("Private IP: {}".format(private_ip_address))
+        logging.info("Instance name: {}".format(edge_conf['instance_name']))
+        logging.info("Hostname: {}".format(edge_conf['instance_dns_name']))
+        logging.info("Public IP: {}".format(public_ip_address))
+        logging.info("Private IP: {}".format(private_ip_address))
         with open("/root/result.json", 'w') as result:
             res = {"instance_name": edge_conf['instance_name'],
                    "hostname": edge_conf['instance_dns_name'],
                    "public_ip": public_ip_address,
                    "ip": private_ip_address,
                    "Action": "Start up notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_status.py b/infrastructure-provisioning/src/general/scripts/azure/edge_status.py
index 6f41654..c59dc37 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_status.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_status.py
@@ -24,7 +24,7 @@
 import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -32,20 +32,12 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     edge_conf = dict()
     edge_conf['service_base_name'] = os.environ['conf_service_base_name']
     edge_conf['resource_group_name'] = os.environ['azure_resource_group_name']
 
     try:
         logging.info('[COLLECT DATA]')
-        print('[COLLECTING DATA]')
         params = '--resource_group_name {} --list_resources "{}"'.format(edge_conf['resource_group_name'],
                                                                          os.environ['edge_list_resources'])
         try:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_stop.py b/infrastructure-provisioning/src/general/scripts/azure/edge_stop.py
index d2bbf5c..8c5efa0 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_stop.py
@@ -25,19 +25,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
     edge_conf = dict()
@@ -49,7 +42,6 @@ if __name__ == "__main__":
                                                            edge_conf['project_name'], edge_conf['endpoint_name'])
 
     logging.info('[STOP EDGE]')
-    print('[STOP EDGE]')
     try:
         AzureActions.stop_instance(edge_conf['resource_group_name'], edge_conf['instance_name'])
     except Exception as err:
@@ -60,7 +52,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"instance_name": edge_conf['instance_name'],
                    "Action": "Stop edge server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_terminate.py b/infrastructure-provisioning/src/general/scripts/azure/edge_terminate.py
index f7c470a..fc317d9 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_terminate.py
@@ -25,122 +25,115 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_edge_node(resource_group_name, service_base_name, project_tag, subnet_name, vpc_name):
-    print("Terminating EDGE, notebook and dataengine virtual machines")
+    logging.info("Terminating EDGE, notebook and dataengine virtual machines")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             try:
                 if project_tag == vm.tags["project_tag"]:
                     AzureActions.remove_instance(resource_group_name, vm.name)
-                    print("Instance {} has been terminated".format(vm.name))
+                    logging.info("Instance {} has been terminated".format(vm.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to terminate instance", str(err))
         sys.exit(1)
 
-    print("Removing network interfaces")
+    logging.info("Removing network interfaces")
     try:
         for network_interface in AzureMeta.list_network_interfaces(resource_group_name):
             try:
                 if project_tag == network_interface.tags["project_tag"]:
                     AzureActions.delete_network_if(resource_group_name, network_interface.name)
-                    print("Network interface {} has been removed".format(network_interface.name))
+                    logging.info("Network interface {} has been removed".format(network_interface.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove network interfaces", str(err))
         sys.exit(1)
 
-    print("Removing static public IPs")
+    logging.info("Removing static public IPs")
     try:
         for static_public_ip in AzureMeta.list_static_ips(resource_group_name):
             try:
                 if project_tag in static_public_ip.tags["project_tag"]:
                     AzureActions.delete_static_public_ip(resource_group_name, static_public_ip.name)
-                    print("Static public IP {} has been removed".format(static_public_ip.name))
+                    logging.info("Static public IP {} has been removed".format(static_public_ip.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove static IPs", str(err))
         sys.exit(1)
 
-    print("Removing disks")
+    logging.info("Removing disks")
     try:
         for disk in AzureMeta.list_disks(resource_group_name):
             try:
                 if project_tag in disk.tags["project_tag"]:
                     AzureActions.remove_disk(resource_group_name, disk.name)
-                    print("Disk {} has been removed".format(disk.name))
+                    logging.info("Disk {} has been removed".format(disk.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove disks", str(err))
         sys.exit(1)
 
-    print("Removing storage account")
+    logging.info("Removing storage account")
     try:
         for storage_account in AzureMeta.list_storage_accounts(resource_group_name):
             try:
                 if project_tag == storage_account.tags["project_tag"]:
                     AzureActions.remove_storage_account(resource_group_name, storage_account.name)
-                    print("Storage account {} has been terminated".format(storage_account.name))
+                    logging.info("Storage account {} has been terminated".format(storage_account.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove storage accounts", str(err))
         sys.exit(1)
 
-    print("Deleting Data Lake Store directory")
+    logging.info("Deleting Data Lake Store directory")
     try:
         for datalake in AzureMeta.list_datalakes(resource_group_name):
             try:
                 if service_base_name == datalake.tags["SBN"]:
                     AzureActions.remove_datalake_directory(datalake.name, project_tag + '-folder')
-                    print("Data Lake Store directory {} has been deleted".format(project_tag + '-folder'))
+                    logging.info("Data Lake Store directory {} has been deleted".format(project_tag + '-folder'))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove Data Lake", str(err))
         sys.exit(1)
 
-    print("Removing security groups")
+    logging.info("Removing security groups")
     try:
         for sg in AzureMeta.network_client.network_security_groups.list(resource_group_name):
             try:
                 if project_tag == sg.tags["project_tag"]:
                     AzureActions.remove_security_group(resource_group_name, sg.name)
-                    print("Security group {} has been terminated".format(sg.name))
+                    logging.info("Security group {} has been terminated".format(sg.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove security groups", str(err))
         sys.exit(1)
 
-    print("Removing private subnet")
+    logging.info("Removing private subnet")
     try:
         AzureActions.remove_subnet(resource_group_name, vpc_name, subnet_name)
-        print("Private subnet {} has been terminated".format(subnet_name))
+        logging.info("Private subnet {} has been terminated".format(subnet_name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove subnet", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
     edge_conf = dict()
@@ -157,7 +150,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE EDGE]')
-        print('[TERMINATE EDGE]')
         try:
             terminate_edge_node(edge_conf['resource_group_name'], edge_conf['service_base_name'],
                                 edge_conf['project_tag'], edge_conf['private_subnet_name'], edge_conf['vpc_name'])
@@ -173,7 +165,7 @@ if __name__ == "__main__":
             res = {"service_base_name": os.environ['conf_service_base_name'],
                    "project_name": edge_conf['project_name'],
                    "Action": "Terminate edge node"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
index 0f30b71..bc26e72 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,12 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -134,7 +128,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -152,7 +145,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
-        print('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -170,7 +162,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
@@ -187,7 +178,6 @@ if __name__ == "__main__":
     # installing and configuring jupiter and all dependencies
     try:
         logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
-        print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {0} --keyfile {1} " \
                  "--region {2} --spark_version {3} " \
                  "--hadoop_version {4} --os_user {5} " \
@@ -211,7 +201,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -228,7 +217,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -245,7 +233,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --resource_group_name {} --notebook_name {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -262,11 +249,11 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'],
                                         notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's the first time we configure the notebook server. Creating an image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -274,7 +261,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -298,7 +285,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -336,17 +322,16 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
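
The "from datalab.logger import logging" import that replaces the stdlib "import logging" in these scripts implies a module that configures the root logger once for every provisioning script. That module is not part of this diff, so the following is only a minimal sketch of what it plausibly contains, reconstructed from the basicConfig() calls and /logs path layout the commit deletes; the file name and the env-variable fallbacks are assumptions.

    # datalab/logger.py (hypothetical sketch)
    import logging
    import os

    # Same format string and /logs/<resource>/ layout as the per-script
    # basicConfig() blocks removed in this commit.
    _log_file = "/logs/{0}/{0}_{1}_{2}.log".format(
        os.environ.get('conf_resource', 'unknown'),
        os.environ.get('project_name', 'unknown'),
        os.environ.get('request_id', 'unknown'))

    _handlers = [logging.StreamHandler()]
    if os.path.isdir(os.path.dirname(_log_file)):
        _handlers.append(logging.FileHandler(_log_file))

    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                        level=logging.DEBUG,
                        handlers=_handlers)

Because the stdlib logging module is process-wide, "from datalab.logger import logging" then hands every script the already-configured module, which is why each script can drop its local basicConfig() block.
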
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
index 4d8fe90..d8cefb6 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,12 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -134,7 +128,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem", initial_user,
              notebook_config['datalab_ssh_user'], sudo_group)
@@ -152,7 +145,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
-        print('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -170,7 +162,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
@@ -187,7 +178,6 @@ if __name__ == "__main__":
     # installing and configuring jupiter and all dependencies
     try:
         logging.info('[CONFIGURE JUPYTERLAB NOTEBOOK INSTANCE]')
-        print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {0} --keyfile {1} " \
                  "--region {2} --spark_version {3} " \
                  "--hadoop_version {4} --os_user {5} " \
@@ -210,7 +200,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -227,7 +216,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -244,10 +232,10 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'], notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's the first time we configure the notebook server. Creating an image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -255,7 +243,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -279,7 +267,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -308,7 +295,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGURING PROXY FOR DOCKER]')
         logging.info('[CONFIGURING PROXY FOR DOCKER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -328,7 +314,6 @@ if __name__ == "__main__":
 
 
     try:
-        print('[STARTING JUPYTER CONTAINER]')
         logging.info('[STARTING JUPYTER CONTAINER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -356,17 +341,16 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_acces_url = "http://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
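
Every configure script above repeats the same shape for a step: log the step title, shell out to a helper script with subprocess.run(..., shell=True, check=True), and record the failure and exit if it fails. A condensed, hypothetical sketch of that pattern (run_step and the sample arguments are illustrative, not names from the codebase):

    import logging  # stands in for the shared datalab.logger module
    import subprocess
    import sys

    def run_step(title, script, params):
        # Announce the step, run the helper script, abort the run on failure.
        logging.info(title)
        try:
            subprocess.run("~/scripts/{}.py {}".format(script, params),
                           shell=True, check=True)
        except subprocess.CalledProcessError as err:
            logging.error("%s failed: %s", title, err)
            sys.exit(1)

    run_step('[SETUP USER GIT CREDENTIALS]', 'manage_git_creds',
             '--os_user datalab-user')  # illustrative invocation
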
diff --git a/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
index 29fce72..a3bc925 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/project_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,15 +34,8 @@ from Crypto.PublicKey import RSA
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/project/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
         project_conf = dict()
@@ -134,12 +127,12 @@ if __name__ == "__main__":
                 subprocess.run('echo "{0}" >> {1}{2}.pub'.format(project_conf['user_key'], os.environ['conf_key_dir'],
                                                         project_conf['project_name']), shell=True, check=True)
             except:
-                print("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
+                logging.warning("ADMIN'S PUBLIC KEY HAS NOT BEEN INSTALLED")
         except KeyError:
-            print("ADMINSs PUBLIC KEY DOES NOT UPLOADED")
+            logging.error("ADMIN'S PUBLIC KEY HAS NOT BEEN UPLOADED")
             sys.exit(1)
 
-        print("Will create exploratory environment with edge node as access point as following: {}".format(json.dumps(
+        logging.info("Will create exploratory environment with the edge node as the access point, as follows: {}".format(json.dumps(
             project_conf, sort_keys=True, indent=4, separators=(',', ': '))))
         logging.info(json.dumps(project_conf))
     except Exception as err:
@@ -149,7 +142,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SUBNET]')
-        print('[CREATE SUBNET]')
         params = "--resource_group_name {} --vpc_name {} --region {} --vpc_cidr {} --subnet_name {} --prefix {}".\
             format(project_conf['resource_group_name'], project_conf['vpc_name'], project_conf['region'],
                    project_conf['vpc_cidr'], project_conf['private_subnet_name'], project_conf['private_subnet_prefix'])
@@ -163,19 +155,19 @@ if __name__ == "__main__":
             AzureActions.remove_subnet(project_conf['resource_group_name'], project_conf['vpc_name'],
                                        project_conf['private_subnet_name'])
         except:
-            print("Subnet hasn't been created.")
+            logging.info("Subnet hasn't been created.")
         datalab.fab.append_result("Failed to create subnet.", str(err))
         sys.exit(1)
 
     project_conf['private_subnet_cidr'] = AzureMeta.get_subnet(project_conf['resource_group_name'],
                                                                project_conf['vpc_name'],
                                                                project_conf['private_subnet_name']).address_prefix
-    print('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
+    logging.info('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
 
     try:
         if 'azure_edge_security_group_name' in os.environ:
             logging.info('Security group predefined, adding new rule with endpoint IP')
-            print('Security group predefined, adding new rule with endpoint IP')
             if project_conf['endpoint_name'] == 'local':
                 endpoint_ip = AzureMeta.get_instance_public_ip_address(project_conf['resource_group_name'],
                                                           '{}-ssn'.format(project_conf['service_base_name']))
@@ -214,7 +206,7 @@ if __name__ == "__main__":
                                            project_conf['private_subnet_name'])
         else:
             logging.info('[CREATE SECURITY GROUP FOR EDGE NODE]')
-            print('[CREATE SECURITY GROUP FOR EDGE]')
             edge_list_rules = [
                 {
                     "name": "in-1",
@@ -493,7 +485,7 @@ if __name__ == "__main__":
                     AzureActions.remove_security_group(project_conf['resource_group_name'],
                                                        project_conf['edge_security_group_name'])
                 except:
-                    print("Edge Security group hasn't been created.")
+                    logging.info("Edge Security group hasn't been created.")
                 traceback.print_exc()
                 datalab.fab.append_result("Failed creating security group for edge node.", str(err))
                 raise Exception
@@ -503,7 +495,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
-        print('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
         notebook_list_rules = [
             {
                 "name": "in-1",
@@ -606,11 +597,10 @@ if __name__ == "__main__":
             AzureActions.remove_security_group(project_conf['resource_group_name'],
                                                project_conf['notebook_security_group_name'])
         except:
-            print("Notebook Security group hasn't been created.")
+            logging.info("Notebook Security group hasn't been created.")
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR MASTER NODE]')
-    print("[CREATING SECURITY GROUPS FOR MASTER NODE]")
     try:
         cluster_list_rules = [
             {
@@ -715,12 +705,11 @@ if __name__ == "__main__":
             AzureActions.remove_security_group(project_conf['resource_group_name'],
                                                project_conf['master_security_group_name'])
         except:
-            print("Master Security group hasn't been created.")
+            logging.info("Master Security group hasn't been created.")
         datalab.fab.append_result("Failed to create Security groups. Exception:" + str(err))
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR SLAVE NODES]')
-    print("[CREATING SECURITY GROUPS FOR SLAVE NODES]")
     try:
         params = "--resource_group_name {} --security_group_name {} --region {} --tags '{}' --list_rules '{}'".format(
             project_conf['resource_group_name'], project_conf['slave_security_group_name'], project_conf['region'],
@@ -744,13 +733,12 @@ if __name__ == "__main__":
             AzureActions.remove_security_group(project_conf['resource_group_name'],
                                                project_conf['slave_security_group_name'])
         except:
-            print("Slave Security group hasn't been created.")
+            logging.info("Slave Security group hasn't been created.")
         datalab.fab.append_result("Failed to create Security groups. Exception:" + str(err))
         sys.exit(1)
 
     try:
         logging.info('[CREATE SHARED STORAGE ACCOUNT AND CONTAINER]')
-        print('[CREATE SHARED STORAGE ACCOUNT AND CONTAINER]')
         params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}". \
             format(project_conf['shared_container_name'], json.dumps(project_conf['shared_storage_account_tags']),
                    project_conf['resource_group_name'], project_conf['region'])
@@ -775,7 +763,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE STORAGE ACCOUNT AND CONTAINERS]')
-        print('[CREATE STORAGE ACCOUNT AND CONTAINERS]')
 
         params = "--container_name {} --account_tags '{}' --resource_group_name {} --region {}". \
             format(project_conf['edge_container_name'], json.dumps(project_conf['storage_account_tags']),
@@ -808,7 +795,7 @@ if __name__ == "__main__":
     if os.environ['azure_datalake_enable'] == 'true':
         try:
             logging.info('[CREATE DATA LAKE STORE DIRECTORY]')
-            print('[CREATE DATA LAKE STORE DIRECTORY]')
             params = "--resource_group_name {} --datalake_name {} --directory_name {} --ad_user {} " \
                      "--service_base_name {}".format(project_conf['resource_group_name'],
                                                      project_conf['datalake_store_name'],
@@ -844,7 +831,7 @@ if __name__ == "__main__":
                         AzureActions.remove_datalake_directory(datalake.name,
                                                                  project_conf['datalake_user_directory_name'])
             except:
-                print("Data Lake Store directory hasn't been created.")
+                logging.info("Data Lake Store directory hasn't been created.")
             sys.exit(1)
 
     if os.environ['conf_os_family'] == 'debian':
@@ -856,7 +843,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE EDGE INSTANCE]')
-        print('[CREATE EDGE INSTANCE]')
         if 'azure_edge_security_group_name' in os.environ:
             project_conf['edge_security_group_name'] = os.environ['azure_edge_security_group_name']
         params = "--instance_name {} --instance_size {} --region {} --vpc_name {} --network_interface_name {} \
@@ -880,7 +866,7 @@ if __name__ == "__main__":
         try:
             AzureActions.remove_instance(project_conf['resource_group_name'], project_conf['instance_name'])
         except:
-            print("The instance hasn't been created.")
+            logging.info("The instance hasn't been created.")
         AzureActions.remove_subnet(project_conf['resource_group_name'], project_conf['vpc_name'],
                                    project_conf['private_subnet_name'])
         if 'azure_edge_security_group_name' not in os.environ:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/project_terminate.py b/infrastructure-provisioning/src/general/scripts/azure/project_terminate.py
index cfe04db..7be7bc5 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/project_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/project_terminate.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import requests
 import sys
@@ -33,96 +33,96 @@ import traceback
 
 
 def terminate_edge_node(resource_group_name, service_base_name, project_tag, subnet_name, vpc_name, endpoint_name):
-    print("Terminating EDGE, notebook and dataengine virtual machines")
+    logging.info("Terminating EDGE, notebook and dataengine virtual machines")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             try:
                 if project_tag == vm.tags["project_tag"]:
                     AzureActions.remove_instance(resource_group_name, vm.name)
-                    print("Instance {} has been terminated".format(vm.name))
+                    logging.info("Instance {} has been terminated".format(vm.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to terminate edge instance.", str(err))
         sys.exit(1)
 
-    print("Removing network interfaces")
+    logging.info("Removing network interfaces")
     try:
         for network_interface in AzureMeta.list_network_interfaces(resource_group_name):
             try:
                 if project_tag == network_interface.tags["project_name"]:
                     AzureActions.delete_network_if(resource_group_name, network_interface.name)
-                    print("Network interface {} has been removed".format(network_interface.name))
+                    logging.info("Network interface {} has been removed".format(network_interface.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove network interfaces.", str(err))
         sys.exit(1)
 
-    print("Removing static public IPs")
+    logging.info("Removing static public IPs")
     try:
         for static_public_ip in AzureMeta.list_static_ips(resource_group_name):
             try:
                 if project_tag in static_public_ip.tags["project_tag"]:
                     AzureActions.delete_static_public_ip(resource_group_name, static_public_ip.name)
-                    print("Static public IP {} has been removed".format(static_public_ip.name))
+                    logging.info("Static public IP {} has been removed".format(static_public_ip.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove static IP addresses.", str(err))
         sys.exit(1)
 
-    print("Removing disks")
+    logging.info("Removing disks")
     try:
         for disk in AzureMeta.list_disks(resource_group_name):
             try:
                 if project_tag in disk.tags["project_tag"]:
                     AzureActions.remove_disk(resource_group_name, disk.name)
-                    print("Disk {} has been removed".format(disk.name))
+                    logging.info("Disk {} has been removed".format(disk.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove volumes.", str(err))
         sys.exit(1)
 
-    print("Removing storage account")
+    logging.info("Removing storage account")
     try:
         for storage_account in AzureMeta.list_storage_accounts(resource_group_name):
             try:
                 if project_tag == storage_account.tags["project_tag"]:
                     AzureActions.remove_storage_account(resource_group_name, storage_account.name)
-                    print("Storage account {} has been terminated".format(storage_account.name))
+                    logging.info("Storage account {} has been terminated".format(storage_account.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove storage accounts.", str(err))
         sys.exit(1)
 
-    print("Deleting Data Lake Store directory")
+    logging.info("Deleting Data Lake Store directory")
     try:
         for datalake in AzureMeta.list_datalakes(resource_group_name):
             try:
                 if service_base_name == datalake.tags["SBN"]:
                     AzureActions.remove_datalake_directory(datalake.name, project_tag + '-folder')
-                    print("Data Lake Store directory {} has been deleted".format(project_tag + '-folder'))
+                    logging.info("Data Lake Store directory {} has been deleted".format(project_tag + '-folder'))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove Data Lake.", str(err))
         sys.exit(1)
 
-    print("Removing project specific images")
+    logging.info("Removing project specific images")
     try:
         for image in AzureMeta.list_images():
             if service_base_name == image.tags["SBN"] and project_tag == image.tags["project_tag"] \
                     and endpoint_name == image.tags["endpoint_tag"]:
                 AzureActions.remove_image(resource_group_name, image.name)
-                print("Image {} has been removed".format(image.name))
+                logging.info("Image {} has been removed".format(image.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove images", str(err))
         sys.exit(1)
 
-    print("Removing security groups")
+    logging.info("Removing security groups")
     try:
         if 'azure_edge_security_group_name' in os.environ:
             AzureActions.remove_security_rules(os.environ['azure_edge_security_group_name'],
@@ -134,31 +134,24 @@ def terminate_edge_node(resource_group_name, service_base_name, project_tag, sub
             try:
                 if project_tag == sg.tags["project_tag"]:
                     AzureActions.remove_security_group(resource_group_name, sg.name)
-                    print("Security group {} has been terminated".format(sg.name))
+                    logging.info("Security group {} has been terminated".format(sg.name))
             except:
                 pass
     except Exception as err:
         datalab.fab.append_result("Failed to remove security groups.", str(err))
         sys.exit(1)
 
-    print("Removing private subnet")
+    logging.info("Removing private subnet")
     try:
         AzureActions.remove_subnet(resource_group_name, vpc_name, subnet_name)
-        print("Private subnet {} has been terminated".format(subnet_name))
+        logging.info("Private subnet {} has been terminated".format(subnet_name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove subnets.", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
     project_conf = dict()
@@ -175,7 +168,7 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE EDGE]')
-        print('[TERMINATE EDGE]')
         try:
             terminate_edge_node(project_conf['resource_group_name'], project_conf['service_base_name'],
                                 project_conf['project_tag'], project_conf['private_subnet_name'],
@@ -188,7 +181,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[KEYCLOAK PROJECT CLIENT DELETE]')
         logging.info('[KEYCLOAK PROJECT CLIENT DELETE]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             os.environ['keycloak_auth_server_url'])
@@ -223,14 +215,14 @@ if __name__ == "__main__":
                                           headers={"Authorization": "Bearer " + keycloak_token.get("access_token"),
                                                    "Content-Type": "application/json"})
     except Exception as err:
-        print("Failed to remove project client from Keycloak", str(err))
+        logging.error("Failed to remove project client from Keycloak: {}".format(str(err)))
 
     try:
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": os.environ['conf_service_base_name'],
                    "project_name": project_conf['project_name'],
                    "Action": "Terminate edge node"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
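
terminate_edge_node() above repeats one loop per resource type: list everything in the resource group, match on the project tag, remove, and log. A condensed sketch of that shape (remove_tagged and its parameters are illustrative stand-ins for the AzureMeta/AzureActions calls):

    import logging  # stands in for datalab.logger

    def remove_tagged(list_fn, remove_fn, tag_key, tag_value, kind):
        for resource in list_fn():
            try:
                if (resource.tags or {}).get(tag_key) == tag_value:
                    remove_fn(resource.name)
                    logging.info("{} {} has been removed".format(kind, resource.name))
            except Exception:
                # Same tolerance as the bare except/pass in the original
                # loops: untagged or already-deleted resources are skipped.
                pass
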
diff --git a/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py b/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
index 57a3aa5..d90e4fb 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
@@ -24,6 +24,7 @@
 import argparse
 import sys
 from datalab.fab import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -34,15 +35,15 @@ parser.add_argument('--rstudio_pass', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
-    print("Setting password for Rstudio user.")
+    logging.info("Setting password for Rstudio user.")
     try:
         conn.sudo('''bash -c 'echo "{0}:{1}" | chpasswd' '''.format(args.os_user, args.rstudio_pass))
         conn.close()
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
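
With the shared logger, the level now carries what the old print() calls could not: routine progress goes to INFO, failures that abort the script go to ERROR. A minimal usage sketch (change_password and its values are illustrative, standing in for the chpasswd call above):

    import logging  # stands in for datalab.logger
    import sys

    def change_password(os_user, password):
        # Hypothetical stand-in for: conn.sudo('echo "user:pass" | chpasswd')
        if not password:
            raise ValueError("empty password")

    try:
        change_password('datalab-user', 's3cret')
        logging.info("Password for RStudio user has been set")
    except Exception as err:
        logging.error('Error: {0}'.format(err))
        sys.exit(1)
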
 
diff --git a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
index bbf85e5..9395f6d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -135,7 +129,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -153,7 +146,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON R_STUDIO INSTANCE]')
-        print('[CONFIGURE PROXY ON R_STUDIO INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -171,7 +163,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO R_STUDIO NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO R_STUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
@@ -188,7 +179,6 @@ if __name__ == "__main__":
     # installing and configuring R_STUDIO and all dependencies
     try:
         logging.info('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
-        print('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {0}  --keyfile {1} " \
                  "--region {2} --rstudio_pass {3} " \
                  "--rstudio_version {4} --os_user {5} " \
@@ -211,7 +201,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -228,7 +217,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -244,7 +232,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --resource_group_name {} --notebook_name {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -261,11 +248,11 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'],
                                         notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's the first time we configure the notebook server. Creating an image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -273,7 +260,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -297,7 +284,7 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -335,19 +322,18 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         rstudio_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Rstudio URL: {}".format(rstudio_ip_url))
-        print("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
-        print("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Rstudio URL: {}".format(rstudio_ip_url))
+        logging.info("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
+        logging.info("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
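
The [CREATING IMAGE] branch re-runs common_prepare_notebook and then polls until the re-created VM reports running; the loop body is elided from the hunks above. A sketch of that wait, assuming a status getter shaped like the AzureMeta lookups used elsewhere:

    import logging  # stands in for datalab.logger
    import time

    def wait_until_running(get_status, resource_group, instance_name, delay=60):
        # get_status is an assumed callable returning the VM power state.
        while get_status(resource_group, instance_name) != 'running':
            logging.info("Waiting for {} to start...".format(instance_name))
            time.sleep(delay)
        logging.info("{} is running".format(instance_name))
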
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
index 7c4e802..ee959e6 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ import uuid
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     def clear_resources():
         AzureActions.remove_instance(ssn_conf['resource_group_name'], ssn_conf['instance_name'])
         for datalake in AzureMeta.list_datalakes(ssn_conf['resource_group_name']):
@@ -63,7 +57,6 @@ if __name__ == "__main__":
         ssn_conf['instance'] = 'ssn'
 
         logging.info('[DERIVING NAMES]')
-        print('[DERIVING NAMES]')
 
         ssn_conf['billing_enabled'] = True
         # We need to cut service_base_name to 20 symbols do to the Azure Name length limitation
@@ -140,7 +133,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (ssn_conf['instance_host'], ssn_conf['ssh_key_path'], ssn_conf['initial_user'],
              ssn_conf['datalab_ssh_user'],
@@ -154,7 +146,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING PREREQUISITES TO SSN INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO SSN INSTANCE]')
         params = "--hostname {} --keyfile {} --pip_packages 'backoff bcrypt==3.1.7 argparse fabric==1.14.0 pymongo pyyaml " \
                  "pycryptodome azure==2.0.0' --user {} --region {}".format(ssn_conf['instance_host'],
                                                                        ssn_conf['ssh_key_path'],
@@ -169,7 +160,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE SSN INSTANCE]')
-        print('[CONFIGURE SSN INSTANCE]')
         additional_config = {"nginx_template_dir": "/root/templates/",
                              "service_base_name": ssn_conf['service_base_name'],
                              "security_group_id": ssn_conf['security_group_name'], "vpc_id": ssn_conf['vpc_name'],
@@ -188,7 +178,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURING DOCKER AT SSN INSTANCE]')
-        print('[CONFIGURING DOCKER AT SSN INSTANCE]')
         additional_config = [{"name": "base", "tag": "latest"},
                              {"name": "edge", "tag": "latest"},
                              {"name": "project", "tag": "latest"},
@@ -213,7 +202,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE SSN INSTANCE UI]')
-        print('[CONFIGURE SSN INSTANCE UI]')
         ssn_conf['azure_auth_path'] = '/home/{}/keys/azure_auth.json'.format(ssn_conf['datalab_ssh_user'])
         ssn_conf['ldap_login'] = 'false'
 
@@ -523,7 +511,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     logging.info('[CREATE KEYCLOAK CLIENT]')
-    print('[CREATE KEYCLOAK CLIENT]')
     keycloak_params = "--service_base_name {} --keycloak_auth_server_url {} --keycloak_realm_name {} " \
                       "--keycloak_user {} --keycloak_user_password {} --instance_public_ip {} --keycloak_client_secret {} " \
         .format(ssn_conf['service_base_name'], os.environ['keycloak_auth_server_url'],
@@ -539,41 +526,39 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SUMMARY]')
-
-        print('[SUMMARY]')
-        print("Service base name: {}".format(ssn_conf['service_base_name']))
-        print("SSN Name: {}".format(ssn_conf['instance_name']))
+        logging.info("Service base name: {}".format(ssn_conf['service_base_name']))
+        logging.info("SSN Name: {}".format(ssn_conf['instance_name']))
         if os.environ['conf_network_type'] == 'public':
-            print("SSN Public IP address: {}".format(ssn_conf['instnace_ip']))
-            print("SSN Hostname: {}".format(ssn_conf['instance_dns_name']))
+            logging.info("SSN Public IP address: {}".format(ssn_conf['instnace_ip']))
+            logging.info("SSN Hostname: {}".format(ssn_conf['instance_dns_name']))
         else:
-            print("SSN Private IP address: {}".format(ssn_conf['instnace_ip']))
-        print("Key name: {}".format(os.environ['conf_key_name']))
-        print("VPC Name: {}".format(ssn_conf['vpc_name']))
-        print("Subnet Name: {}".format(ssn_conf['subnet_name']))
-        print("Security groups Names: {}".format(ssn_conf['security_group_name']))
-        print("SSN instance size: {}".format(os.environ['azure_ssn_instance_size']))
+            logging.info("SSN Private IP address: {}".format(ssn_conf['instnace_ip']))
+        logging.info("Key name: {}".format(os.environ['conf_key_name']))
+        logging.info("VPC Name: {}".format(ssn_conf['vpc_name']))
+        logging.info("Subnet Name: {}".format(ssn_conf['subnet_name']))
+        logging.info("Security groups Names: {}".format(ssn_conf['security_group_name']))
+        logging.info("SSN instance size: {}".format(os.environ['azure_ssn_instance_size']))
         ssn_conf['datalake_store_full_name'] = 'None'
         if os.environ['azure_datalake_enable'] == 'true':
             for datalake in AzureMeta.list_datalakes(ssn_conf['resource_group_name']):
                 if ssn_conf['datalake_store_name'] == datalake.tags["Name"]:
                     ssn_conf['datalake_store_full_name'] = datalake.name
-                    print("DataLake store name: {}".format(ssn_conf['datalake_store_full_name']))
-            print("DataLake shared directory name: {}".format(ssn_conf['datalake_shared_directory_name']))
-        print("Region: {}".format(ssn_conf['region']))
+                    logging.info("DataLake store name: {}".format(ssn_conf['datalake_store_full_name']))
+            logging.info("DataLake shared directory name: {}".format(ssn_conf['datalake_shared_directory_name']))
+        logging.info("Region: {}".format(ssn_conf['region']))
         jenkins_url = "http://{}/jenkins".format(ssn_conf['instance_host'])
         jenkins_url_https = "https://{}/jenkins".format(ssn_conf['instance_host'])
-        print("Jenkins URL: {}".format(jenkins_url))
-        print("Jenkins URL HTTPS: {}".format(jenkins_url_https))
-        print("DataLab UI HTTP URL: http://{}".format(ssn_conf['instance_host']))
-        print("DataLab UI HTTPS URL: https://{}".format(ssn_conf['instance_host']))
+        logging.info("Jenkins URL: {}".format(jenkins_url))
+        logging.info("Jenkins URL HTTPS: {}".format(jenkins_url_https))
+        logging.info("DataLab UI HTTP URL: http://{}".format(ssn_conf['instance_host']))
+        logging.info("DataLab UI HTTPS URL: https://{}".format(ssn_conf['instance_host']))
 
         try:
             with open('jenkins_creds.txt') as f:
-                print(f.read())
+                logging.info(f.read())
         except Exception as err:
-            print('Error: {0}'.format(err))
-            print("Jenkins is either configured already or have issues in configuration routine.")
+            logging.warning('Error: {0}'.format(err))
+            logging.warning("Jenkins is either already configured or has issues in its configuration routine.")
 
         with open("/root/result.json", 'w') as f:
             if os.environ['azure_datalake_enable'] == 'false':
@@ -602,7 +587,7 @@ if __name__ == "__main__":
                        "action": "Create SSN instance"}
             f.write(json.dumps(res))
 
-        print('Upload response file')
+        logging.info('Upload response file')
+        local_log_filepath = "/logs/{0}/{0}_{1}.log".format(os.environ['conf_resource'], os.environ['request_id'])
         params = "--instance_name {} --local_log_filepath {} --os_user {} --instance_hostname {}". \
             format(ssn_conf['instance_name'], local_log_filepath, ssn_conf['datalab_ssh_user'], ssn_conf['instnace_ip'])
         subprocess.run("~/scripts/{}.py {}".format('upload_response_file', params), shell=True, check=True)
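
The configure and terminate scripts all finish the same way: the final status is logged and also written to /root/result.json, which the provisioning engine reads back. A self-contained sketch with illustrative values:

    import json
    import logging  # stands in for datalab.logger

    res = {"service_base_name": "datalab",      # illustrative values only
           "instance_name": "datalab-ssn",
           "action": "Create SSN instance"}
    logging.info(json.dumps(res))
    with open("/root/result.json", 'w') as f:
        f.write(json.dumps(res))
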
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_datalake.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_datalake.py
index 471d78c..228c40d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_datalake.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_datalake.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--datalake_name', type=str, default='')
@@ -42,14 +43,14 @@ if __name__ == "__main__":
         for datalake in AzureMeta().list_datalakes(args.resource_group_name):
             if datalake["Name"] == datalake.tags["Name"]:
                 check_datalake = True
-                print("REQUESTED DATA LAKE {} ALREADY EXISTS".format(datalake.name))
+                logging.info("REQUESTED DATA LAKE {} ALREADY EXISTS".format(datalake.name))
         if not check_datalake:
             datalake_name = id_generator().lower()
-            print("Creating DataLake {}.".format(datalake_name))
+            logging.info("Creating DataLake {}.".format(datalake_name))
             datalake = AzureActions().create_datalake_store(args.resource_group_name, datalake_name, args.region,
                                                             datalake_tags)
-            print("DATA LAKE {} has been created".format(datalake_name))
+            logging.info("DATA LAKE {} has been created".format(datalake_name))
 
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
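
The unique DataLake name comes from id_generator().lower(). That helper lives in datalab.fab and is not shown in this diff, so the following reconstruction is an assumption about its shape:

    import random
    import string

    def id_generator(size=10, chars=string.ascii_uppercase + string.digits):
        # Assumed implementation of the datalab.fab helper used above.
        return ''.join(random.choice(chars) for _ in range(size))

    datalake_name = id_generator().lower()  # e.g. 'x7qk2m91fa'
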
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_peering.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_peering.py
index a827c0a..be956ba 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_peering.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_peering.py
@@ -26,6 +26,7 @@ import sys
 import time
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--source_resource_group_name', type=str, default='')
@@ -49,7 +50,7 @@ if __name__ == "__main__":
             args.source_virtual_network_name,
         ).id
 
-        print("Creating Virtual Network peering {} and {}".format(source_virtual_network_peering_name, destination_virtual_network_peering_name))
+        logging.info("Creating Virtual Network peering {} and {}".format(source_virtual_network_peering_name, destination_virtual_network_peering_name))
         AzureActions().create_virtual_network_peerings(
                 args.source_resource_group_name,
                 args.source_virtual_network_name,
@@ -62,5 +63,5 @@ if __name__ == "__main__":
                 source_vnet_id)
         time.sleep(250)
     except Exception as err:
-        print("Error creating vpc peering: " + str(err))
+        logging.error("Error creating vpc peering: " + str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_resource_group.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_resource_group.py
index 4463350..e8e809a 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_resource_group.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_resource_group.py
@@ -24,6 +24,7 @@
 import argparse
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--resource_group_name', type=str, default='')
@@ -33,10 +34,10 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.resource_group_name != '':
         if AzureMeta().get_resource_group(args.resource_group_name):
-            print("REQUESTED RESOURCE GROUP {} EXISTS".format(args.resource_group_name))
+            logging.info("REQUESTED RESOURCE GROUP {} EXISTS".format(args.resource_group_name))
         else:
-            print("Creating Resource Group {}".format(args.resource_group_name))
+            logging.info("Creating Resource Group {}".format(args.resource_group_name))
             AzureActions().create_resource_group(args.resource_group_name, args.region)
     else:
-        print("Resource group name can't be empty.")
+        logging.error("Resource group name can't be empty.")
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_vpc.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_vpc.py
index 015bace..abfca0d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_create_vpc.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_create_vpc.py
@@ -24,6 +24,7 @@
 import argparse
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--vpc_name', type=str, default='')
@@ -35,10 +36,10 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.vpc_name != '':
         if AzureMeta().get_vpc(args.resource_group_name, args.vpc_name):
-            print("REQUESTED VIRTUAL NETWORK {} EXISTS".format(args.vpc_name))
+            logging.info("REQUESTED VIRTUAL NETWORK {} EXISTS".format(args.vpc_name))
         else:
-            print("Creating Virtual Network {}".format(args.vpc_name))
+            logging.info("Creating Virtual Network {}".format(args.vpc_name))
             AzureActions().create_vpc(args.resource_group_name, args.vpc_name, args.region, args.vpc_cidr)
     else:
-        print("VPC name can't be empty.")
+        logging.error("VPC name can't be empty.")
         sys.exit(1)
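
ssn_create_resource_group.py and ssn_create_vpc.py follow the same idempotent check-then-create shape. A generic sketch (ensure_resource, get_fn and create_fn are illustrative stand-ins for the AzureMeta/AzureActions calls):

    import logging  # stands in for datalab.logger
    import sys

    def ensure_resource(name, get_fn, create_fn, kind):
        if not name:
            logging.error("%s name can't be empty.", kind)
            sys.exit(1)
        if get_fn(name):
            logging.info("REQUESTED %s %s EXISTS", kind.upper(), name)
        else:
            logging.info("Creating %s %s", kind, name)
            create_fn(name)
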
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
index 38040b6..42a5799 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,11 +34,6 @@ from Crypto.PublicKey import RSA
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -46,7 +41,6 @@ if __name__ == "__main__":
         ssn_conf['instance'] = 'ssn'
 
         logging.info('[DERIVING NAMES]')
-        print('[DERIVING NAMES]')
         # Verify vpc deployment
         if os.environ['conf_network_type'] == 'private' and not os.environ.get('azure_vpc_name') \
                 and not os.environ.get('azure_source_vpc_name'):
@@ -103,10 +97,8 @@ if __name__ == "__main__":
     try:
         if 'azure_resource_group_name' in os.environ:
             logging.info('Resource group predefined')
-            print('Resource group predefined')
         else:
             logging.info('[CREATING RESOURCE GROUP]')
-            print("[CREATING RESOURCE GROUP]")
             params = "--resource_group_name {} --region {}".format(ssn_conf['resource_group_name'], ssn_conf['region'])
             subprocess.run("~/scripts/{}.py {}".format('ssn_create_resource_group', params), shell=True, check=True)
     except Exception as err:
@@ -117,10 +109,8 @@ if __name__ == "__main__":
     try:
         if 'azure_vpc_name' in os.environ:
             logging.info('VPC predefined')
-            print('VPC predefined')
         else:
             logging.info('[CREATING VIRTUAL NETWORK]')
-            print("[CREATING VIRTUAL NETWORK]")
             params = "--resource_group_name {} --vpc_name {} --region {} --vpc_cidr {}".format(
                 ssn_conf['resource_group_name'], ssn_conf['vpc_name'], ssn_conf['region'], ssn_conf['vpc_cidr'])
             subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True, check=True)
@@ -137,10 +127,8 @@ if __name__ == "__main__":
     try:
         if 'azure_subnet_name' in os.environ:    
             logging.info('Subnet predefined')
-            print('Subnet predefined')
         else:
             logging.info('[CREATING SUBNET]')
-            print("[CREATING SUBNET]")
             params = "--resource_group_name {} --vpc_name {} --region {} --vpc_cidr {} --subnet_name {} --prefix {}".\
                 format(ssn_conf['resource_group_name'], ssn_conf['vpc_name'], ssn_conf['region'],
                        ssn_conf['vpc_cidr'], ssn_conf['subnet_name'], ssn_conf['subnet_prefix'])
@@ -154,14 +142,13 @@ if __name__ == "__main__":
             if 'azure_resource_group_name' not in os.environ:
                 AzureActions.remove_resource_group(ssn_conf['resource_group_name'], ssn_conf['region'])
         except Exception as err:
-            print("Resources hasn't been removed: {}".format(str(err)))
+            logging.info("Resources hasn't been removed: {}".format(str(err)))
             datalab.fab.append_result("Resources hasn't been removed.", str(err))
         sys.exit(1)
     
     try:
         if 'azure_vpc_name' not in os.environ and os.environ['conf_network_type'] == 'private':
             logging.info('[CREATING VPC PEERING]')
-            print("[CREATING VPC PEERING]")
             params = "--source_resource_group_name {} --destination_resource_group_name {} " \
                      "--source_virtual_network_name {} --destination_virtual_network_name {}".format(
                       ssn_conf['source_resource_group_name'], ssn_conf['resource_group_name'],
@@ -175,7 +162,7 @@ if __name__ == "__main__":
             if 'azure_resource_group_name' not in os.environ:
                 AzureActions.remove_resource_group(ssn_conf['resource_group_name'], ssn_conf['region'])
         except Exception as err:
-            print("Resources hasn't been removed: " + str(err))
+            logging.info("Resources hasn't been removed: " + str(err))
             datalab.fab.append_result("Resources hasn't been removed.", str(err))
         datalab.fab.append_result("Failed to create VPC peering.", str(err))
         sys.exit(1)
@@ -183,10 +170,8 @@ if __name__ == "__main__":
     try:
         if 'azure_security_group_name' in os.environ:
             logging.info('Security group predefined')
-            print('Security group predefined')
         else:
             logging.info('[CREATING SECURITY GROUP]')
-            print("[CREATING SECURITY GROUP]")
             list_rules = [
                 {
                     "name": "in-1",
@@ -249,14 +234,13 @@ if __name__ == "__main__":
             if 'azure_resource_group_name' not in os.environ:
                 AzureActions.remove_resource_group(ssn_conf['resource_group_name'], ssn_conf['region'])
         except Exception as err:
-            print("Resources hasn't been removed: " + str(err))
+            logging.info("Resources hasn't been removed: " + str(err))
             datalab.fab.append_result("Resources hasn't been removed.", str(err))
         sys.exit(1)
 
     if os.environ['azure_datalake_enable'] == 'true':
         try:
             logging.info('[CREATE DATA LAKE STORE]')
-            print('[CREATE DATA LAKE STORE]')
             params = "--datalake_name {} --datalake_tags '{}' --resource_group_name {} --region {}". \
                      format(ssn_conf['datalake_store_name'], json.dumps(ssn_conf['datalake_store_tags']),
                             ssn_conf['resource_group_name'], ssn_conf['region'])
@@ -267,7 +251,6 @@ if __name__ == "__main__":
                 raise Exception
 
             logging.info('[CREATE DATA LAKE SHARED DIRECTORY]')
-            print('[CREATE DATA LAKE SHARED DIRECTORY]')
             params = "--resource_group_name {} --datalake_name {} --directory_name {} --service_base_name {} --ad_group {}". \
                 format(ssn_conf['resource_group_name'], ssn_conf['datalake_store_name'],
                        ssn_conf['datalake_shared_directory_name'], ssn_conf['service_base_name'],
@@ -303,7 +286,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SSN INSTANCE]')
-        print('[CREATE SSN INSTANCE]')
         params = "--instance_name {} --instance_size {} --region {} --vpc_name {} --network_interface_name {} \
             --security_group_name {} --subnet_name {} --service_base_name {} --resource_group_name {} \
             --datalab_ssh_user_name {} --public_ip_name {} --public_key '''{}''' --primary_disk_size {} \
@@ -321,7 +303,7 @@ if __name__ == "__main__":
         try:
             AzureActions.remove_instance(ssn_conf['resource_group_name'], ssn_conf['instance_name'])
         except:
-            print("The instance {} hasn't been created".format(ssn_conf['instance_name']))
+            logging.info("The instance {} hasn't been created".format(ssn_conf['instance_name']))
         for datalake in AzureMeta.list_datalakes(ssn_conf['resource_group_name']):
             if ssn_conf['datalake_store_name'] == datalake.tags["Name"]:
                 AzureActions.delete_datalake_store(ssn_conf['resource_group_name'], datalake.name)
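
Besides swapping print() for logging.info(), the hunks above also drop the
per-script basicConfig block that wrote to
/logs/<conf_resource>/<conf_resource>_<request_id>.log. That destination is
presumably handled inside datalab.logger now; if not, the removed behaviour
could be recreated centrally along these lines (function name and placement
are assumptions, the path and format come from the deleted block):

    import os
    import logging

    def add_request_file_handler():
        # Rebuild the log path from the same environment variables the
        # deleted basicConfig block used.
        log_name = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
        log_path = os.path.join("/logs", os.environ['conf_resource'], log_name)
        handler = logging.FileHandler(log_path)
        handler.setFormatter(logging.Formatter('%(levelname)-8s [%(asctime)s]  %(message)s'))
        logging.getLogger().addHandler(handler)
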
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_terminate.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_terminate.py
index 0423aac..865ad37 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_terminate.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.meta_lib
 import datalab.ssn_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,99 +34,99 @@ from fabric import *
 
 
 def terminate_ssn_node(resource_group_name, service_base_name, vpc_name, region):
-    print("Terminating instances")
+    logging.info("Terminating instances")
     try:
         for vm in AzureMeta.compute_client.virtual_machines.list(resource_group_name):
             if "SBN" in vm.tags and service_base_name == vm.tags["SBN"]:
                 AzureActions.remove_instance(resource_group_name, vm.name)
-                print("Instance {} has been terminated".format(vm.name))
+                logging.info("Instance {} has been terminated".format(vm.name))
     except Exception as err:
         datalab.fab.append_result("Failed to terminate instances", str(err))
         sys.exit(1)
 
-    print("Removing network interfaces")
+    logging.info("Removing network interfaces")
     try:
         for network_interface in AzureMeta.list_network_interfaces(resource_group_name):
             if "SBN" in network_interface.tags and service_base_name == network_interface.tags["SBN"]:
                 AzureActions.delete_network_if(resource_group_name, network_interface.name)
-                print("Network interface {} has been removed".format(network_interface.name))
+                logging.info("Network interface {} has been removed".format(network_interface.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove network interfaces", str(err))
         sys.exit(1)
 
-    print("Removing static public IPs")
+    logging.info("Removing static public IPs")
     try:
         for static_public_ip in AzureMeta.list_static_ips(resource_group_name):
             if "SBN" in static_public_ip.tags and service_base_name == static_public_ip.tags["SBN"]:
                 AzureActions.delete_static_public_ip(resource_group_name, static_public_ip.name)
-                print("Static public IP {} has been removed".format(static_public_ip.name))
+                logging.info("Static public IP {} has been removed".format(static_public_ip.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove static IPs", str(err))
         sys.exit(1)
 
-    print("Removing disks")
+    logging.info("Removing disks")
     try:
         for disk in AzureMeta.list_disks(resource_group_name):
             if "SBN" in disk.tags and service_base_name == disk.tags["SBN"]:
                 AzureActions.remove_disk(resource_group_name, disk.name)
-                print("Disk {} has been removed".format(disk.name))
+                logging.info("Disk {} has been removed".format(disk.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove disks", str(err))
         sys.exit(1)
 
-    print("Removing storage accounts")
+    logging.info("Removing storage accounts")
     try:
         for storage_account in AzureMeta.list_storage_accounts(resource_group_name):
             if "SBN" in storage_account.tags and service_base_name == storage_account.tags["SBN"]:
                 AzureActions.remove_storage_account(resource_group_name, storage_account.name)
-                print("Storage account {} has been terminated".format(storage_account.name))
+                logging.info("Storage account {} has been terminated".format(storage_account.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove storage accounts", str(err))
         sys.exit(1)
 
-    print("Removing Data Lake Store")
+    logging.info("Removing Data Lake Store")
     try:
         for datalake in AzureMeta.list_datalakes(resource_group_name):
             if "SBN" in datalake.tags and service_base_name == datalake.tags["SBN"]:
                 AzureActions.delete_datalake_store(resource_group_name, datalake.name)
-                print("Data Lake Store {} has been terminated".format(datalake.name))
+                logging.info("Data Lake Store {} has been terminated".format(datalake.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove Data Lake", str(err))
         sys.exit(1)
 
-    print("Removing images")
+    logging.info("Removing images")
     try:
         for image in AzureMeta.list_images():
             if "SBN" in image.tags and service_base_name == image.tags["SBN"]:
                 AzureActions.remove_image(resource_group_name, image.name)
-                print("Image {} has been removed".format(image.name))
+                logging.info("Image {} has been removed".format(image.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove images", str(err))
         sys.exit(1)
 
-    print("Removing security groups")
+    logging.info("Removing security groups")
     try:
         for sg in AzureMeta.network_client.network_security_groups.list(resource_group_name):
             if "SBN" in sg.tags and service_base_name == sg.tags["SBN"]:
                 AzureActions.remove_security_group(resource_group_name, sg.name)
-                print("Security group {} has been terminated".format(sg.name))
+                logging.info("Security group {} has been terminated".format(sg.name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove security groups", str(err))
         sys.exit(1)
 
     if 'azure_vpc_name' in os.environ:
-        print("Removing subnets in predefined VPC")
+        logging.info("Removing subnets in predefined VPC")
         try:
             for subnet in AzureMeta.list_subnets(resource_group_name, os.environ['azure_vpc_name']):
                 subnet_name = str(subnet)[str(subnet).find("'name': '") + 9 : str(subnet).find("', 'etag':")]
                 if service_base_name in subnet_name:
                     AzureActions.remove_subnet(resource_group_name, os.environ['azure_vpc_name'], subnet_name)
-                    print("Subnet {} has been removed from VPC {}".format(subnet_name, os.environ['azure_vpc_name']))
+                    logging.info("Subnet {} has been removed from VPC {}".format(subnet_name, os.environ['azure_vpc_name']))
         except Exception as err:
             datalab.fab.append_result("Failed to remove subnets in predefined VPC", str(err))
             sys.exit(1)
 
-    print("Removing rules in predefined edge security group")
+    logging.info("Removing rules in predefined edge security group")
     try:
         if 'azure_edge_security_group_name' in os.environ:
             for rule in AzureMeta.list_security_group_rules(resource_group_name, os.environ['azure_edge_security_group_name']):
@@ -134,31 +134,30 @@ def terminate_ssn_node(resource_group_name, service_base_name, vpc_name, region)
                 if service_base_name in rule_name:
                     AzureActions.remove_security_rules(os.environ['azure_edge_security_group_name'],
                                                resource_group_name, rule_name)
-                    print("Rule {} is removed".format(rule_name))
+                    logging.info("Rule {} is removed".format(rule_name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove rules in predefined edge security group", str(err))
         sys.exit(1)
 
-    print("Removing VPC")
+    logging.info("Removing VPC")
     try:
         if AzureMeta.get_vpc(resource_group_name, service_base_name + '-vpc'):
             AzureActions.remove_vpc(resource_group_name, vpc_name)
-            print("VPC {} has been terminated".format(vpc_name))
+            logging.info("VPC {} has been terminated".format(vpc_name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove VPC", str(err))
         sys.exit(1)
 
-    print("Removing Resource Group")
+    logging.info("Removing Resource Group")
     try:
         if AzureMeta.get_resource_group(resource_group_name) and resource_group_name == '{}-resource-group'.format(service_base_name):
             AzureActions.remove_resource_group(resource_group_name, region)
-            print("Resource group {} has been terminated".format(resource_group_name))
+            logging.info("Resource group {} has been terminated".format(resource_group_name))
     except Exception as err:
         datalab.fab.append_result("Failed to remove resource group", str(err))
         sys.exit(1)
 
     try:
-        print('[KEYCLOAK SSN CLIENT DELETE]')
         logging.info('[KEYCLOAK SSN CLIENT DELETE]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             os.environ['keycloak_auth_server_url'])
@@ -193,19 +192,14 @@ def terminate_ssn_node(resource_group_name, service_base_name, vpc_name, region)
             headers={"Authorization": "Bearer {}".format(keycloak_token.get("access_token")),
                      "Content-Type": "application/json"})
     except Exception as err:
-        print("Failed to remove ssn client from Keycloak", str(err))
+        logging.info("Failed to remove ssn client from Keycloak", str(err))
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     AzureMeta = datalab.meta_lib.AzureMeta()
     AzureActions = datalab.actions_lib.AzureActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     ssn_conf = dict()
     ssn_conf['service_base_name'] = datalab.fab.replace_multi_symbols(os.environ['conf_service_base_name'][:20],
                                                                       '-', True)
@@ -216,7 +210,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE SSN]')
-        print('[TERMINATE SSN]')
         try:
             terminate_ssn_node(ssn_conf['resource_group_name'], ssn_conf['service_base_name'], ssn_conf['vpc_name'],
                                ssn_conf['region'])
@@ -231,7 +224,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": ssn_conf['service_base_name'],
                    "Action": "Terminate ssn with all service_base_name environment"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
index 137f355..e3f8fec 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,14 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -136,7 +128,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -154,7 +145,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON TENSOR INSTANCE]')
-        print('[CONFIGURE PROXY ON TENSOR INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -172,7 +162,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO TENSOR NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO TENSOR NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                    edge_instance_private_hostname)
@@ -189,7 +178,6 @@ if __name__ == "__main__":
     # installing and configuring TensorFlow and all dependencies
     try:
         logging.info('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
-        print('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
         params = "--hostname {0} --keyfile {1} " \
                  "--region {2} --os_user {3} " \
                  "--ip_address {4} --exploratory_name {5} --edge_ip {6}" \
@@ -209,7 +197,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -226,7 +213,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -242,7 +228,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --resource_group_name {} --notebook_name {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -259,11 +244,11 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'],
                                         notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -271,7 +256,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -295,7 +280,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -336,19 +320,18 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("TensorBoard URL: {}".format(tensorboard_url))
-        print("TensorBoard log dir: /var/log/tensorboard")
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("TensorBoard URL: {}".format(tensorboard_url))
+        logging.info("TensorBoard log dir: /var/log/tensorboard")
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
index 536955b..c2b9b01 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         AzureMeta = datalab.meta_lib.AzureMeta()
         AzureActions = datalab.actions_lib.AzureActions()
@@ -135,7 +129,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -153,7 +146,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON ZEPPELIN INSTANCE]')
-        print('[CONFIGURE PROXY ON ZEPPELIN INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -171,7 +163,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO ZEPPELIN NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO ZEPPELIN NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}" \
             .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['azure_region'],
                     edge_instance_private_hostname)
@@ -188,7 +179,6 @@ if __name__ == "__main__":
     # installing and configuring zeppelin and all dependencies
     try:
         logging.info('[CONFIGURE ZEPPELIN NOTEBOOK INSTANCE]')
-        print('[CONFIGURE ZEPPELIN NOTEBOOK INSTANCE]')
         additional_config = {"frontend_hostname": edge_instance_private_hostname,
                              "backend_hostname": instance_hostname,
                              "backend_port": "8080",
@@ -222,7 +212,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -239,7 +228,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -255,7 +243,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --resource_group_name {} --notebook_name {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -272,11 +259,11 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             image = AzureMeta.get_image(notebook_config['resource_group_name'],
                                         notebook_config['expected_image_name'])
             if image == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 datalab.actions_lib.prepare_vm_for_image(True, notebook_config['datalab_ssh_user'], instance_hostname,
                                                          keyfile_name)
                 AzureActions.create_image_from_instance(notebook_config['resource_group_name'],
@@ -284,7 +271,7 @@ if __name__ == "__main__":
                                                         os.environ['azure_region'],
                                                         notebook_config['expected_image_name'],
                                                         json.dumps(notebook_config['image_tags']))
-                print("Image was successfully created.")
+                logging.info("Image was successfully created.")
                 subprocess.run("~/scripts/{}.py".format('common_prepare_notebook'), shell=True, check=True)
                 instance_running = False
                 while not instance_running:
@@ -308,7 +295,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -346,17 +332,16 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         zeppelin_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_size']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Zeppelin URL: {}".format(zeppelin_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_size']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Zeppelin URL: {}".format(zeppelin_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org