Posted to commits@datalab.apache.org by lf...@apache.org on 2021/10/11 15:00:21 UTC

[incubator-datalab] 03/04: [DATALAB-2409]: replaced print with logging in all general/scripts/gcp .py scripts

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit d9593a5667945fadbbbc6727b01cb0e789ca4624
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Oct 11 17:51:48 2021 +0300

    [DATALAB-2409]: replaced print with logging in all general/scripts/gcp .py scripts
---
 .../src/general/scripts/gcp/common_collect_data.py |  5 ++-
 .../general/scripts/gcp/common_create_bucket.py    |  5 ++-
 .../general/scripts/gcp/common_create_firewall.py  |  9 ++--
 .../general/scripts/gcp/common_create_instance.py  |  5 ++-
 .../general/scripts/gcp/common_create_nat_route.py |  5 ++-
 .../scripts/gcp/common_create_notebook_image.py    |  9 ++--
 .../scripts/gcp/common_create_service_account.py   | 15 ++++---
 .../general/scripts/gcp/common_create_subnet.py    |  9 ++--
 .../scripts/gcp/common_download_git_certfile.py    |  5 ++-
 .../src/general/scripts/gcp/common_install_gpu.py  |  5 ++-
 ...common_notebook_configure_dataengine-service.py | 15 ++-----
 .../gcp/common_notebook_configure_dataengine.py    | 15 ++-----
 .../general/scripts/gcp/common_prepare_notebook.py | 22 +++------
 .../src/general/scripts/gcp/common_reupload_key.py |  3 +-
 .../general/scripts/gcp/common_start_notebook.py   | 22 +++------
 .../general/scripts/gcp/common_stop_notebook.py    | 32 +++++--------
 .../scripts/gcp/common_terminate_notebook.py       | 29 +++++-------
 .../scripts/gcp/dataengine-service_configure.py    | 44 +++++++-----------
 .../scripts/gcp/dataengine-service_create.py       |  5 ++-
 .../scripts/gcp/dataengine-service_install_libs.py | 14 ++----
 .../scripts/gcp/dataengine-service_list_libs.py    | 13 ++----
 .../scripts/gcp/dataengine-service_prepare.py      | 15 ++-----
 .../scripts/gcp/dataengine-service_terminate.py    | 22 +++------
 .../general/scripts/gcp/dataengine_configure.py    | 41 +++++------------
 .../src/general/scripts/gcp/dataengine_prepare.py  | 21 +++------
 .../src/general/scripts/gcp/dataengine_start.py    | 16 ++-----
 .../src/general/scripts/gcp/dataengine_stop.py     | 15 ++-----
 .../general/scripts/gcp/dataengine_terminate.py    | 17 +++----
 .../general/scripts/gcp/deeplearning_configure.py  | 46 +++++++------------
 .../src/general/scripts/gcp/edge_configure.py      | 41 ++++++-----------
 .../general/scripts/gcp/edge_create_static_ip.py   | 13 ++----
 .../src/general/scripts/gcp/edge_start.py          | 23 +++-------
 .../src/general/scripts/gcp/edge_status.py         | 15 ++-----
 .../src/general/scripts/gcp/edge_stop.py           | 14 ++----
 .../src/general/scripts/gcp/edge_terminate.py      | 32 +++++--------
 .../src/general/scripts/gcp/jupyter_configure.py   | 48 +++++++-------------
 .../general/scripts/gcp/jupyterlab_configure.py    | 48 +++++++-------------
 .../src/general/scripts/gcp/project_prepare.py     | 36 +++++----------
 .../src/general/scripts/gcp/project_terminate.py   | 43 +++++++-----------
 .../src/general/scripts/gcp/rstudio_configure.py   | 48 +++++++-------------
 .../rstudio_dataengine-service_create_configs.py   |  5 ++-
 .../src/general/scripts/gcp/ssn_configure.py       | 52 ++++++++--------------
 .../general/scripts/gcp/ssn_create_static_ip.py    | 12 ++---
 .../src/general/scripts/gcp/ssn_create_vpc.py      |  7 +--
 .../src/general/scripts/gcp/ssn_finalize.py        |  3 +-
 .../src/general/scripts/gcp/ssn_prepare.py         | 22 +++------
 .../src/general/scripts/gcp/ssn_terminate.py       | 15 ++-----
 .../scripts/gcp/ssn_terminate_gcp_resources.py     | 45 ++++++++++---------
 .../src/general/scripts/gcp/superset_configure.py  | 52 ++++++++--------------
 .../scripts/gcp/tensor-rstudio_configure.py        | 50 ++++++++-------------
 .../src/general/scripts/gcp/tensor_configure.py    | 48 +++++++-------------
 .../src/general/scripts/gcp/zeppelin_configure.py  | 42 ++++++-----------
 52 files changed, 420 insertions(+), 773 deletions(-)
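
For context: every script in this patch swaps ad-hoc print() calls (and the
per-script logging.basicConfig blocks that wrote to /logs/<resource>/...) for
a shared "from datalab.logger import logging" import. That module is not part
of this commit, so the sketch below is only a guess at what datalab/logger.py
might provide -- a one-time configuration that keeps the old format string and
re-exports the stdlib logging module:

    # datalab/logger.py -- hypothetical sketch, NOT part of this commit.
    # Scripts import the already-configured stdlib module via
    # "from datalab.logger import logging".
    import logging
    import sys

    logging.basicConfig(
        # same format string as the removed per-script basicConfig blocks
        format='%(levelname)-8s [%(asctime)s]  %(message)s',
        level=logging.INFO,
        stream=sys.stdout,  # assumption: console output instead of per-resource log files
    )

Because the module itself does "import logging", the name handed to callers is
the standard library module, already configured, so each script's
logging.info()/logging.error() calls share one format and destination.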

diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_collect_data.py b/infrastructure-provisioning/src/general/scripts/gcp/common_collect_data.py
index 02a3dcb..1f8527e 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_collect_data.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_collect_data.py
@@ -29,6 +29,7 @@ import traceback
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -54,13 +55,13 @@ if __name__ == "__main__":
             data_instances = GCPMeta().get_list_instance_statuses(id_hosts)
             statuses['host'] = data_instances
         except:
-            print("Hosts JSON wasn't been provided")
+            logging.error("Hosts JSON wasn't been provided")
         try:
             id_clusters = get_id_resourses(data.get('cluster'))
             data_clusters = GCPMeta().get_list_cluster_statuses(id_clusters, full_check=False)
             statuses['cluster'] = data_clusters
         except:
-            print("Clusters JSON wasn't been provided")
+            logging.error("Clusters JSON wasn't been provided")
         with open('/root/result.json', 'w') as outfile:
             json.dump(statuses, outfile)
     except Exception as err:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_bucket.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_bucket.py
index 061746a..34352e5 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_bucket.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_bucket.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--bucket_name', type=str, default='')
@@ -35,9 +36,9 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.bucket_name:
         if GCPMeta().get_bucket(args.bucket_name):
-            print("REQUESTED BUCKET {} ALREADY EXISTS".format(args.bucket_name))
+            logging.info("REQUESTED BUCKET {} ALREADY EXISTS".format(args.bucket_name))
         else:
-            print("Creating Bucket {}".format(args.bucket_name))
+            logging.info("Creating Bucket {}".format(args.bucket_name))
             GCPActions().create_bucket(args.bucket_name)
             GCPActions().add_bucket_labels(args.bucket_name, json.loads(args.tags))
     else:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_firewall.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_firewall.py
index aa126c5..2ef8b7b 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_firewall.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_firewall.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--firewall', type=str)
@@ -36,15 +37,15 @@ if __name__ == "__main__":
     if firewall:
         for firewall_rule in firewall['ingress']:
             if GCPMeta().get_firewall(firewall_rule['name']):
-                print("REQUESTED INGRESS FIREWALL {} ALREADY EXISTS".format(firewall_rule['name']))
+                logging.info("REQUESTED INGRESS FIREWALL {} ALREADY EXISTS".format(firewall_rule['name']))
             else:
-                print("Creating Ingress Firewall {}".format(firewall_rule['name']))
+                logging.info("Creating Ingress Firewall {}".format(firewall_rule['name']))
                 GCPActions().create_firewall(firewall_rule)
         for firewall_rule in firewall['egress']:
             if GCPMeta().get_firewall(firewall_rule['name']):
-                print("REQUESTED EGRESS FIREWALL {} ALREADY EXISTS".format(firewall_rule['name']))
+                logging.info("REQUESTED EGRESS FIREWALL {} ALREADY EXISTS".format(firewall_rule['name']))
             else:
-                print("Creating Egress Firewall {}".format(firewall_rule['name']))
+                logging.info("Creating Egress Firewall {}".format(firewall_rule['name']))
                 GCPActions().create_firewall(firewall_rule)
     else:
         parser.print_help()
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py
index b62f882..d780b44 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_instance.py
@@ -26,6 +26,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--instance_name', type=str, default='')
@@ -55,9 +56,9 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.instance_name:
         if GCPMeta().get_instance(args.instance_name):
-            print("REQUESTED INSTANCE {} ALREADY EXISTS".format(args.instance_name))
+            logging.info("REQUESTED INSTANCE {} ALREADY EXISTS".format(args.instance_name))
         else:
-            print("Creating Instance {}".format(args.instance_name))
+            logging.info("Creating Instance {}".format(args.instance_name))
             GCPActions().create_instance(args.instance_name, args.service_base_name, args.cluster_name, args.region, args.zone,
                                          args.vpc_name, args.subnet_name,
                                          args.instance_size, args.ssh_key_path, args.initial_user, args.image_name,
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_nat_route.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_nat_route.py
index d9a5f0b..f1d49fb 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_nat_route.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_nat_route.py
@@ -25,6 +25,7 @@ import argparse
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--nat_route_name', type=str)
@@ -35,9 +36,9 @@ args = parser.parse_args()
 
 if __name__ == "__main__":
     if GCPMeta().get_route(args.nat_route_name):
-        print("REQUESTED ROUTE {} ALREADY EXISTS".format(args.nat_route_name))
+        logging.info("REQUESTED ROUTE {} ALREADY EXISTS".format(args.nat_route_name))
     else:
-        print("Creating NAT ROUTE {}".format(args.nat_route_name))
+        logging.info("Creating NAT ROUTE {}".format(args.nat_route_name))
         params = {
             "destRange": "0.0.0.0/0",
             "name": args.nat_route_name,
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_notebook_image.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_notebook_image.py
index f7ba4fd..1be0d2e 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_notebook_image.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_notebook_image.py
@@ -24,6 +24,7 @@
 import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
+from datalab.logger import logging
 import json
 import os
 import sys
@@ -63,19 +64,19 @@ if __name__ == "__main__":
                                                                        image_conf['endpoint_name'],
                                                                        image_conf['exploratory_name'])
         image_conf['zone'] = os.environ['gcp_zone']
-        print('[CREATING IMAGE]')
+        logging.info('[CREATING IMAGE]')
         primary_image_id = GCPMeta.get_image_by_name(image_conf['expected_primary_image_name'])
         if primary_image_id == '':
             image_id_list = GCPActions.create_image_from_instance_disks(
                 image_conf['expected_primary_image_name'], image_conf['expected_secondary_image_name'],
                 image_conf['instance_name'], image_conf['zone'], image_conf['image_labels'])
             if image_id_list and image_id_list[0] != '':
-                print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
             else:
-                print("Looks like another image creating operation for your template have been started a "
+                logging.info("Looks like another image creating operation for your template have been started a "
                       "moment ago.")
             if image_id_list and image_id_list[1] != '':
-                print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
 
             with open("/root/result.json", 'w') as result:
                 res = {"primary_image_name": image_conf['expected_primary_image_name'],
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_service_account.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_service_account.py
index d1e9a55..44f33ee 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_service_account.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_service_account.py
@@ -25,6 +25,7 @@ import argparse
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--service_account_name', type=str, default='')
@@ -39,16 +40,16 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.service_account_name != '':
         if GCPMeta().get_service_account(args.service_account_name, args.service_base_name):
-            print("REQUESTED SERVICE ACCOUNT {} ALREADY EXISTS".format(args.service_account_name))
+            logging.info("REQUESTED SERVICE ACCOUNT {} ALREADY EXISTS".format(args.service_account_name))
         else:
-            print("Creating Service account {}".format(args.service_account_name))
+            logging.info("Creating Service account {}".format(args.service_account_name))
             GCPActions().create_service_account(args.service_account_name, args.service_base_name, args.unique_index)
             if GCPMeta().get_role(args.role_name):
                 if GCPMeta().get_role_status(args.role_name) == True:
-                    print('Restoring deleted role')
+                    logging.info('Restoring deleted role')
                     GCPActions().undelete_role(args.role_name)
                 else:
-                    print("REQUESTED ROLE {} ALREADY EXISTS".format(args.role_name))
+                    logging.info("REQUESTED ROLE {} ALREADY EXISTS".format(args.role_name))
             else:
                 if args.policy_path == '':
                     permissions = []
@@ -56,12 +57,12 @@ if __name__ == "__main__":
                     with open(args.policy_path, 'r') as f:
                         json_file = f.read()
                     permissions = json.loads(json_file)
-                print("Creating Role {}".format(args.role_name))
+                logging.info("Creating Role {}".format(args.role_name))
                 GCPActions().create_role(args.role_name, permissions)
-            print("Assigning custom role to Service account.")
+            logging.info("Assigning custom role to Service account.")
             GCPActions().set_role_to_service_account(args.service_account_name, args.role_name, args.service_base_name)
             if args.roles_path != '':
-                print("Assigning predefined roles to Service account.")
+                logging.info("Assigning predefined roles to Service account.")
                 with open(args.roles_path, 'r') as f:
                     json_file = f.read()
                 predefined_roles = json.loads(json_file)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_create_subnet.py b/infrastructure-provisioning/src/general/scripts/gcp/common_create_subnet.py
index 194e108..1153aad 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_create_subnet.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_create_subnet.py
@@ -26,6 +26,7 @@ import ipaddress
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--subnet_name', type=str, default='')
@@ -105,17 +106,17 @@ if __name__ == "__main__":
             existed_subnet_list.append(GCPMeta().get_subnet(subnet.split('/')[-1], args.region)['ipCidrRange'])
         available_subnets = list(set(pre_defined_subnet_list) - set(existed_subnet_list))
         if not available_subnets:
-            print("There is no available subnet to create. Aborting...")
+            logging.info("There is no available subnet to create. Aborting...")
             sys.exit(1)
         else:
             datalab_subnet_cidr = available_subnets[0]
 
     if args.subnet_name != '':
         if GCPMeta().get_subnet(args.subnet_name, args.region):
-            print("REQUESTED SUBNET {} ALREADY EXISTS".format(args.subnet_name))
+            logging.info("REQUESTED SUBNET {} ALREADY EXISTS".format(args.subnet_name))
         else:
-            print("Creating Subnet {}".format(args.subnet_name))
+            logging.info("Creating Subnet {}".format(args.subnet_name))
             GCPActions().create_subnet(args.subnet_name, datalab_subnet_cidr, args.vpc_selflink, args.region)
     else:
-        print("Subnet name can't be empty")
+        logging.info("Subnet name can't be empty")
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
index c2a3644..7256d4e 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
@@ -25,6 +25,7 @@ import argparse
 import os
 from datalab.actions_lib import *
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--keyfile', type=str, default='')
@@ -42,8 +43,8 @@ if __name__ == "__main__":
     if GCPActions().get_gitlab_cert(bucket_name, gitlab_certfile):
         conn.put(gitlab_certfile, gitlab_certfile)
         conn.sudo('chown root:root {}'.format(gitlab_certfile))
-        print('{} has been downloaded'.format(gitlab_certfile))
+        logging.info('{} has been downloaded'.format(gitlab_certfile))
     else:
-        print('There is no {} to download'.format(gitlab_certfile))
+        logging.info('There is no {} to download'.format(gitlab_certfile))
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_install_gpu.py b/infrastructure-provisioning/src/general/scripts/gcp/common_install_gpu.py
index 733236d..4f85b9b 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_install_gpu.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_install_gpu.py
@@ -26,6 +26,7 @@ import os
 import sys
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -37,11 +38,11 @@ args = parser.parse_args()
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
-    print('Installing GPU drivers')
+    logging.info('Installing GPU drivers')
     install_nvidia_drivers(args.os_user)
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
index 7273709..3305eb5 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine-service.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -41,17 +41,10 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -97,7 +90,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         params = "--bucket {} --cluster_name {} --dataproc_version {} --keyfile {} --notebook_ip {} --region {} " \
                  "--edge_user_name {} --project_name {} --os_user {}  --edge_hostname {} --proxy_port {} " \
                  "--scala_version {} --application {}" \
@@ -118,7 +110,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
-        print('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
         params = "--hostname {0} " \
                  "--keyfile {1} " \
                  "--os_user {2} " \
@@ -140,7 +131,7 @@ if __name__ == "__main__":
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Tag_name": notebook_config['tag_name'],
                    "Action": "Configure notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
index c6e2d7a..6f79458 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -41,18 +41,11 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
         # generating variables dictionary
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         notebook_config = dict()
         if 'exploratory_name' in os.environ:
             notebook_config['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -93,7 +86,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         params = "--cluster_name {0} --spark_version {1} --hadoop_version {2} --os_user {3} --spark_master {4}" \
                  " --keyfile {5} --notebook_ip {6} --spark_master_ip {7}".\
             format(notebook_config['cluster_name'], os.environ['notebook_spark_version'],
@@ -112,7 +104,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
-        print('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
         params = "--hostname {0} " \
                  "--keyfile {1} " \
                  "--os_user {2} " \
@@ -135,7 +126,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Configure notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
index 96d1a3b..dbacfab 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_prepare_notebook.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,16 +34,10 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         notebook_config = dict()
         notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
         notebook_config['edge_user_name'] = (os.environ['edge_user_name'])
@@ -59,7 +53,6 @@ if __name__ == "__main__":
                                                                             notebook_config['endpoint_tag']))
         if edge_status != 'RUNNING':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             ssn_hostname = GCPMeta.get_private_ip_address(notebook_config['service_base_name'] + '-ssn')
             datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
                                             os.environ['conf_os_user'],
@@ -118,7 +111,7 @@ if __name__ == "__main__":
             notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_name'],
             os.environ['application'], os.environ['notebook_image_name'].replace('_', '-').lower()) if (x != 'None' and x != '')
             else notebook_config['expected_primary_image_name'])(str(os.environ.get('notebook_image_name')))
-        print('Searching pre-configured images')
+        logging.info('Searching pre-configured images')
 
         deeplearning_ami = 'false'
 
@@ -131,7 +124,7 @@ if __name__ == "__main__":
         if notebook_config['primary_image_name'] == '':
             notebook_config['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
         else:
-            print('Pre-configured primary image found. Using: {}'.format(
+            logging.info('Pre-configured primary image found. Using: {}'.format(
                 notebook_config['primary_image_name'].get('name')))
             if deeplearning_ami == 'true':
                 notebook_config['primary_image_name'] = 'projects/deeplearning-platform-release/global/images/{}'.format(
@@ -150,7 +143,7 @@ if __name__ == "__main__":
         if notebook_config['secondary_image_name'] == '':
             notebook_config['secondary_image_name'] = 'None'
         else:
-            print('Pre-configured secondary image found. Using: {}'.format(
+            logging.info('Pre-configured secondary image found. Using: {}'.format(
                 notebook_config['secondary_image_name'].get('name')))
             notebook_config['secondary_image_name'] = 'global/images/{}'.format(
                 notebook_config['secondary_image_name'].get('name'))
@@ -173,11 +166,11 @@ if __name__ == "__main__":
             data = {"notebook_name": notebook_config['instance_name'], "error": ""}
             json.dump(data, f)
 
-        print('Additional tags will be added: {}'.format(os.environ['tags']))
+        logging.info('Additional tags will be added: {}'.format(os.environ['tags']))
         additional_tags = os.environ['tags'].replace("': '", ":").replace("', '", ",").replace("{'", "" ).replace(
             "'}", "").lower()
 
-        print('Additional tags will be added: {}'.format(additional_tags))
+        logging.info('Additional tags will be added: {}'.format(additional_tags))
         notebook_config['labels'] = {"name": notebook_config['instance_name'],
                                      "sbn": notebook_config['service_base_name'],
                                      "product": "datalab"
@@ -196,7 +189,6 @@ if __name__ == "__main__":
     # launching instance for notebook server
     try:
         logging.info('[CREATE NOTEBOOK INSTANCE]')
-        print('[CREATE NOTEBOOK INSTANCE]')
         params = "--instance_name {0} --region {1} --zone {2} --vpc_name {3} --subnet_name {4} --instance_size {5} " \
                  "--ssh_key_path {6} --initial_user {7} --service_account_name {8} --image_name {9} " \
                  "--secondary_image_name {10} --instance_class {11} --primary_disk_size {12} " \
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py b/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py
index 0119977..e50061f 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_reupload_key.py
@@ -27,6 +27,7 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -48,5 +49,5 @@ if __name__ == "__main__":
         try:
             subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
index fe45998..f63d6fb 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,16 +33,10 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -50,9 +44,8 @@ if __name__ == "__main__":
 
     try:
         logging.info('[START NOTEBOOK]')
-        print('[START NOTEBOOK]')
         try:
-            print("Starting notebook")
+            logging.info("Starting notebook")
             GCPActions.start_instance(notebook_config['notebook_name'], notebook_config['zone'])
         except Exception as err:
             traceback.print_exc()
@@ -63,7 +56,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SETUP USER GIT CREDENTIALS]')
-        print('[SETUP USER GIT CREDENTIALS]')
         notebook_config['notebook_ip'] = GCPMeta.get_private_ip_address(notebook_config['notebook_name'])
         notebook_config['keyfile'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
@@ -79,7 +71,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATE LAST ACTIVITY TIME]')
-        print('[UPDATE LAST ACTIVITY TIME]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
@@ -92,16 +83,15 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['notebook_name']))
-        print("Private IP: {}".format(notebook_config['notebook_ip']))
+        logging.info("Instance name: {}".format(notebook_config['notebook_name']))
+        logging.info("Private IP: {}".format(notebook_config['notebook_ip']))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": notebook_config['notebook_ip'],
                    "ip": notebook_config['notebook_ip'],
                    "notebook_name": notebook_config['notebook_name'],
                    "Action": "Start up notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_stop_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_stop_notebook.py
index 094de05..abde92e 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_stop_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_stop_notebook.py
@@ -25,13 +25,13 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 
 def stop_notebook(instance_name, bucket_name, region, zone, ssh_user, key_path, project_name):
-    print('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
+    logging.info('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
     try:
         labels = [
             {instance_name: '*'}
@@ -43,35 +43,35 @@ def stop_notebook(instance_name, bucket_name, region, zone, ssh_user, key_path,
                     'computational_name')
                 cluster = GCPMeta.get_list_cluster_statuses([cluster_name])
                 GCPActions.bucket_cleanup(bucket_name, project_name, cluster_name)
-                print('The bucket {} has been cleaned successfully'.format(bucket_name))
+                logging.info('The bucket {} has been cleaned successfully'.format(bucket_name))
                 GCPActions.delete_dataproc_cluster(cluster_name, region)
-                print('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
+                logging.info('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
                 GCPActions.remove_kernels(instance_name, cluster_name, cluster[0]['version'], ssh_user,
                                           key_path, computational_name)
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataproc", str(err))
         sys.exit(1)
 
-    print("Stopping data engine cluster")
+    logging.info("Stopping data engine cluster")
     try:
         clusters_list = GCPMeta.get_list_instances_by_label(zone, instance_name)
         if clusters_list.get('items'):
             for vm in clusters_list['items']:
                 try:
                     GCPActions.stop_instance(vm['name'], zone)
-                    print("Instance {} has been stopped".format(vm['name']))
+                    logging.info("Instance {} has been stopped".format(vm['name']))
                 except:
                     pass
         else:
-            print("There are no data engine clusters to terminate.")
+            logging.info("There are no data engine clusters to terminate.")
 
     except Exception as err:
         datalab.fab.append_result("Failed to stop dataengine cluster", str(err))
         sys.exit(1)
 
-    print("Stopping notebook")
+    logging.info("Stopping notebook")
     try:
         GCPActions.stop_instance(instance_name, zone)
     except Exception as err:
@@ -80,17 +80,10 @@ def stop_notebook(instance_name, bucket_name, region, zone, ssh_user, key_path,
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['edge_user_name'] = (os.environ['edge_user_name'])
@@ -105,14 +98,13 @@ if __name__ == "__main__":
     notebook_config['gcp_zone'] = os.environ['gcp_zone']
 
     logging.info('[STOP NOTEBOOK]')
-    print('[STOP NOTEBOOK]')
     try:
         stop_notebook(notebook_config['notebook_name'], notebook_config['bucket_name'],
                       notebook_config['gcp_region'], notebook_config['gcp_zone'],
                       os.environ['conf_os_user'], notebook_config['key_path'],
                       notebook_config['project_name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         datalab.fab.append_result("Failed to stop notebook.", str(err))
         sys.exit(1)
 
@@ -120,7 +112,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Stop notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_terminate_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_terminate_notebook.py
index 100d49d..db40b05 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_terminate_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_terminate_notebook.py
@@ -25,14 +25,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_nb(instance_name, bucket_name, region, zone, user_name):
-    print('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
+    logging.info('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
     try:
         labels = [
             {instance_name: '*'}
@@ -41,33 +41,33 @@ def terminate_nb(instance_name, bucket_name, region, zone, user_name):
         if clusters_list:
             for cluster_name in clusters_list:
                 GCPActions.bucket_cleanup(bucket_name, user_name, cluster_name)
-                print('The bucket {} has been cleaned successfully'.format(bucket_name))
+                logging.info('The bucket {} has been cleaned successfully'.format(bucket_name))
                 GCPActions.delete_dataproc_cluster(cluster_name, region)
-                print('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
+                logging.info('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataproc", str(err))
         sys.exit(1)
 
-    print("Terminating data engine cluster")
+    logging.info("Terminating data engine cluster")
     try:
         clusters_list = GCPMeta.get_list_instances_by_label(zone, instance_name)
         if clusters_list.get('items'):
             for vm in clusters_list['items']:
                 try:
                     GCPActions.remove_instance(vm['name'], zone)
-                    print("Instance {} has been terminated".format(vm['name']))
+                    logging.info("Instance {} has been terminated".format(vm['name']))
                 except:
                     pass
         else:
-            print("There are no data engine clusters to terminate.")
+            logging.info("There are no data engine clusters to terminate.")
 
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataengine", str(err))
         sys.exit(1)
 
-    print("Terminating notebook")
+    logging.info("Terminating notebook")
     try:
         GCPActions.remove_instance(instance_name, zone)
     except Exception as err:
@@ -76,16 +76,10 @@ def terminate_nb(instance_name, bucket_name, region, zone, user_name):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['edge_user_name'] = (os.environ['edge_user_name'])
@@ -100,7 +94,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE NOTEBOOK]')
-        print('[TERMINATE NOTEBOOK]')
         try:
             terminate_nb(notebook_config['notebook_name'], notebook_config['bucket_name'],
                          notebook_config['gcp_region'], notebook_config['gcp_zone'],
@@ -116,7 +109,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Terminate notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
index 1773a61..7cc3c65 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.meta_lib
 import datalab.notebook_lib
 import json
-import logging
+from datalab.logger import logging
 import multiprocessing
 import os
 import sys
@@ -40,7 +40,6 @@ def configure_dataengine_service(instance, dataproc_conf):
     # configuring proxy on Data Engine service
     try:
         logging.info('[CONFIGURE PROXY ON DATAENGINE SERVICE]')
-        print('[CONFIGURE PROXY ON DATAENGINE SERVICE]')
         additional_config = {"proxy_host": dataproc_conf['edge_instance_name'], "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(dataproc_conf['instance_ip'], dataproc_conf['cluster_name'], dataproc_conf['key_path'],
@@ -57,7 +56,6 @@ def configure_dataengine_service(instance, dataproc_conf):
 
     try:
         logging.info('[CONFIGURE DATAENGINE SERVICE]')
-        print('[CONFIGURE DATAENGINE SERVICE]')
         try:
             global conn
             conn = datalab.fab.init_datalab_connection(dataproc_conf['instance_ip'], dataproc_conf['datalab_ssh_user'], dataproc_conf['key_path'])
@@ -77,7 +75,6 @@ def configure_dataengine_service(instance, dataproc_conf):
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         slaves = []
         for idx, instance in enumerate(dataproc_conf['cluster_core_instances']):
@@ -119,16 +116,10 @@ def configure_dataengine_service(instance, dataproc_conf):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         dataproc_conf = dict()
         if 'exploratory_name' in os.environ:
             dataproc_conf['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -216,21 +207,20 @@ if __name__ == "__main__":
         dataproc_master_access_url = "https://" + dataproc_conf['edge_instance_hostname'] + "/{}/".format(
             dataproc_conf['exploratory_name'] + '_' + dataproc_conf['computational_name'])
         logging.info('[SUMMARY]')
-        print('[SUMMARY]')
-        print("Service base name: {}".format(dataproc_conf['service_base_name']))
-        print("Cluster name: {}".format(dataproc_conf['cluster_name']))
-        print("Key name: {}".format(dataproc_conf['key_name']))
-        print("Region: {}".format(dataproc_conf['region']))
-        print("Zone: {}".format(dataproc_conf['zone']))
-        print("Subnet: {}".format(dataproc_conf['subnet']))
-        print("Dataproc version: {}".format(dataproc_conf['release_label']))
-        print("Dataproc master node shape: {}".format(os.environ['dataproc_master_instance_type']))
-        print("Dataproc slave node shape: {}".format(os.environ['dataproc_slave_instance_type']))
-        print("Master count: {}".format(os.environ['dataproc_master_count']))
-        print("Slave count: {}".format(os.environ['dataproc_slave_count']))
-        print("Preemptible count: {}".format(os.environ['dataproc_preemptible_count']))
-        print("Notebook hostname: {}".format(os.environ['notebook_instance_name']))
-        print("Bucket name: {}".format(dataproc_conf['bucket_name']))
+        logging.info("Service base name: {}".format(dataproc_conf['service_base_name']))
+        logging.info("Cluster name: {}".format(dataproc_conf['cluster_name']))
+        logging.info("Key name: {}".format(dataproc_conf['key_name']))
+        logging.info("Region: {}".format(dataproc_conf['region']))
+        logging.info("Zone: {}".format(dataproc_conf['zone']))
+        logging.info("Subnet: {}".format(dataproc_conf['subnet']))
+        logging.info("Dataproc version: {}".format(dataproc_conf['release_label']))
+        logging.info("Dataproc master node shape: {}".format(os.environ['dataproc_master_instance_type']))
+        logging.info("Dataproc slave node shape: {}".format(os.environ['dataproc_slave_instance_type']))
+        logging.info("Master count: {}".format(os.environ['dataproc_master_count']))
+        logging.info("Slave count: {}".format(os.environ['dataproc_slave_count']))
+        logging.info("Preemptible count: {}".format(os.environ['dataproc_preemptible_count']))
+        logging.info("Notebook hostname: {}".format(os.environ['notebook_instance_name']))
+        logging.info("Bucket name: {}".format(dataproc_conf['bucket_name']))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": dataproc_conf['cluster_name'],
                    "key_name": dataproc_conf['key_name'],
@@ -242,7 +232,7 @@ if __name__ == "__main__":
                         "url": dataproc_master_access_url}
                    ]
                    }
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_create.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_create.py
index f63e94c..978d2d4 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_create.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_create.py
@@ -27,6 +27,7 @@ import json
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -39,12 +40,12 @@ args = parser.parse_args()
 
 def upload_jars_parser(args):
     if not datalab.actions_lib.GCPActions().put_to_bucket(args.bucket, '/root/scripts/dataengine-service_jars_parser.py', 'jars_parser.py'):
-        print('Failed to upload jars_parser script')
+        logging.error('Failed to upload jars_parser script')
         raise Exception
 
 
 def build_dataproc_cluster(args, cluster_name):
-    print("Will be created cluster: {}".format(json.dumps(params, sort_keys=True, indent=4, separators=(',', ': '))))
+    logging.info("Will be created cluster: {}".format(json.dumps(params, sort_keys=True, indent=4, separators=(',', ': '))))
     return datalab.actions_lib.GCPActions().create_dataproc_cluster(cluster_name, args.region, params)
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py
index 32475e1..22fd566 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_install_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import multiprocessing
 import os
 import sys
@@ -48,16 +48,8 @@ def install_libs(instance, data_engine):
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE-SERVICE]')
-        print('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE-SERVICE]')
         data_engine = dict()
         try:
             data_engine['os_user'] = os.environ['conf_os_user']
@@ -70,7 +62,7 @@ if __name__ == "__main__":
             data_engine['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
             data_engine['libs'] = os.environ['libs']
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         try:
@@ -88,6 +80,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to install additional libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py
index 55af8b5..4b4af7c 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_list_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -29,20 +29,12 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
 from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         logging.info('[GETTING AVAILABLE PACKAGES]')
-        print('[GETTING AVAILABLE PACKAGES]')
         data_engine = dict()
         try:
             data_engine['os_user'] = os.environ['conf_os_user']
@@ -66,6 +58,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to get available libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
index 3229525..5c8972d 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import time
@@ -35,16 +35,10 @@ from Crypto.PublicKey import RSA
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         dataproc_conf = dict()
         if 'exploratory_name' in os.environ:
             dataproc_conf['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -118,7 +112,6 @@ if __name__ == "__main__":
     edge_status = GCPMeta.get_instance_status(dataproc_conf['edge_instance_hostname'])
     if edge_status != 'RUNNING':
         logging.info('ERROR: Edge node is unavailable! Aborting...')
-        print('ERROR: Edge node is unavailable! Aborting...')
         ssn_hostname = GCPMeta.get_private_ip_address(dataproc_conf['service_base_name'] + '-ssn')
         datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
                                         os.environ['conf_os_user'],
@@ -126,7 +119,7 @@ if __name__ == "__main__":
         datalab.fab.append_result("Edge node is unavailable")
         sys.exit(1)
 
-    print("Will create exploratory environment with edge node as access point as following: ".format(
+    logging.info("Will create exploratory environment with edge node as access point as following: ".format(
         json.dumps(dataproc_conf, sort_keys=True, indent=4, separators=(',', ': '))))
     logging.info(json.dumps(dataproc_conf))
 
@@ -181,7 +174,6 @@
 
     try:
         logging.info('[Creating Dataproc Cluster]')
-        print('[Creating Dataproc Cluster]')
         params = "--region {0} --bucket {1} --params '{2}'".format(dataproc_conf['region'],
                                                                    dataproc_conf['bucket_name'],
                                                                    json.dumps(dataproc_cluster))
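
One logging call in this file was also missing its '{}' placeholder: without one, str.format() silently discards its arguments, so the dataproc_conf dump never reached the log. A two-line illustration of the pitfall:

    conf = {'cluster': 'demo'}
    print("as following: ".format(conf))   # -> 'as following: '   (conf dropped)
    print("as follows: {}".format(conf))   # -> "as follows: {'cluster': 'demo'}"
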
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_terminate.py
index f66a67d..c30cad2 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_terminate.py
@@ -25,43 +25,36 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_dataproc_cluster(notebook_name, dataproc_name, bucket_name, ssh_user, key_path):
-    print('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
+    logging.info('Terminating Dataproc cluster and cleaning Dataproc config from bucket')
     try:
         cluster = GCPMeta.get_list_cluster_statuses([dataproc_name])
         if cluster[0]['status'] == 'running':
             computational_name = GCPMeta.get_cluster(dataproc_name).get('labels').get('computational_name')
             GCPActions.bucket_cleanup(bucket_name, dataproc_conf['project_name'], dataproc_name)
-            print('The bucket {} has been cleaned successfully'.format(bucket_name))
+            logging.info('The bucket {} has been cleaned successfully'.format(bucket_name))
             GCPActions.delete_dataproc_cluster(dataproc_name, os.environ['gcp_region'])
-            print('The Dataproc cluster {} has been terminated successfully'.format(dataproc_name))
+            logging.info('The Dataproc cluster {} has been terminated successfully'.format(dataproc_name))
             GCPActions.remove_kernels(notebook_name, dataproc_name, cluster[0]['version'], ssh_user,
                                                     key_path, computational_name)
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate Dataproc cluster.", str(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     dataproc_conf = dict()
     dataproc_conf['service_base_name'] = os.environ['conf_service_base_name']
     dataproc_conf['edge_user_name'] = (os.environ['edge_user_name'])
@@ -79,7 +72,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE DATAPROC CLUSTER]')
-        print('[TERMINATE DATAPROC CLUSTER]')
         try:
             terminate_dataproc_cluster(dataproc_conf['notebook_name'], dataproc_conf['dataproc_name'],
                                        dataproc_conf['bucket_name'], os.environ['conf_os_user'],
@@ -97,7 +89,7 @@ if __name__ == "__main__":
                    "notebook_name": dataproc_conf['notebook_name'],
                    "user_own_bucket_name": dataproc_conf['bucket_name'],
                    "Action": "Terminate Dataproc cluster"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
index 5a33caa..76bfb13 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import multiprocessing
 import os
 import sys
@@ -39,7 +39,6 @@ def configure_slave(slave_number, data_engine):
     slave_hostname = GCPMeta.get_private_ip_address(slave_name)
     try:
         logging.info('[CREATING DATALAB SSH USER ON SLAVE NODE]')
-        print('[CREATING DATALAB SSH USER ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (slave_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem", initial_user,
              data_engine['datalab_ssh_user'], sudo_group)
@@ -55,7 +54,6 @@ def configure_slave(slave_number, data_engine):
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY ON SLAVE NODE]')
         logging.info('[INSTALLING USERs KEY ON SLAVE NODE]')
         additional_config = {"user_keyname": data_engine['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -74,7 +72,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CONFIGURE PROXY ON SLAVE NODE]')
-        print('[CONFIGURE PROXY ON ON SLAVE NODE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(slave_hostname, slave_name, keyfile_name, json.dumps(additional_config),
@@ -91,7 +88,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON SLAVE NODE]')
-        print('[INSTALLING PREREQUISITES ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
@@ -107,7 +103,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
-        print('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --master_ip {} --node_type {}". \
             format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
@@ -125,7 +120,7 @@ def configure_slave(slave_number, data_engine):
 
     if 'slave_gpu_type' in os.environ:
         try:
-            print('[INSTALLING GPU DRIVERS ON MASTER NODE]')
+            logging.info('[INSTALLING GPU DRIVERS ON SLAVE NODE]')
             params = "--hostname {} --keyfile {} --os_user {}".format(
                 slave_hostname, keyfile_name, data_engine['datalab_ssh_user'])
             try:
@@ -148,17 +143,10 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
-
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         data_engine = dict()
         data_engine['service_base_name'] = (os.environ['conf_service_base_name'])
         data_engine['edge_user_name'] = (os.environ['edge_user_name'])
@@ -228,7 +216,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER ON MASTER NODE]')
-        print('[CREATING DATALAB SSH USER ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (master_node_hostname, os.environ['conf_key_dir'] + data_engine['key_name'] + ".pem", initial_user,
              data_engine['datalab_ssh_user'], sudo_group)
@@ -244,7 +231,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY ON MASTER NODE]')
         logging.info('[INSTALLING USERs KEY ON MASTER NODE]')
         additional_config = {"user_keyname": data_engine['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -263,7 +249,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE PROXY ON MASTER NODE]')
-        print('[CONFIGURE PROXY ON ON MASTER NODE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(master_node_hostname, data_engine['master_node_name'], keyfile_name, json.dumps(additional_config),
@@ -280,7 +265,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON MASTER NODE]')
-        print('[INSTALLING PREREQUISITES ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
@@ -296,7 +280,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE MASTER NODE]')
-        print('[CONFIGURE MASTER NODE]')
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --master_ip {} --node_type {}".\
             format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
@@ -315,7 +298,7 @@ if __name__ == "__main__":
 
     if 'master_gpu_type' in os.environ:
         try:
-            print('[INSTALLING GPU DRIVERS ON MASTER NODE]')
+            logging.info('[INSTALLING GPU DRIVERS ON MASTER NODE]')
             params = "--hostname {} --keyfile {} --os_user {}".format(
                 master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'])
             try:
@@ -346,7 +329,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         notebook_instance_ip = GCPMeta.get_private_ip_address(data_engine['notebook_name'])
         additional_info = {
@@ -386,13 +368,12 @@ if __name__ == "__main__":
         spark_master_access_url = "https://" + edge_instance_hostname + "/{}/".format(
             data_engine['exploratory_name'] + '_' + data_engine['computational_name'])
         logging.info('[SUMMARY]')
-        print('[SUMMARY]')
-        print("Service base name: {}".format(data_engine['service_base_name']))
-        print("Region: {}".format(data_engine['region']))
-        print("Cluster name: {}".format(data_engine['cluster_name']))
-        print("Master node shape: {}".format(data_engine['master_size']))
-        print("Slave node shape: {}".format(data_engine['slave_size']))
-        print("Instance count: {}".format(str(data_engine['instance_count'])))
+        logging.info("Service base name: {}".format(data_engine['service_base_name']))
+        logging.info("Region: {}".format(data_engine['region']))
+        logging.info("Cluster name: {}".format(data_engine['cluster_name']))
+        logging.info("Master node shape: {}".format(data_engine['master_size']))
+        logging.info("Slave node shape: {}".format(data_engine['slave_size']))
+        logging.info("Instance count: {}".format(str(data_engine['instance_count'])))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": data_engine['cluster_name'],
                    "instance_id": data_engine['master_node_name'],
@@ -405,7 +386,7 @@ if __name__ == "__main__":
                        # "url": spark_master_url}
                    ]
                    }
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
index 96ba448..2db42e6 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,16 +34,10 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         data_engine = dict()
         data_engine['service_base_name'] = (os.environ['conf_service_base_name'])
         data_engine['edge_user_name'] = (os.environ['edge_user_name'])
@@ -64,7 +58,6 @@ if __name__ == "__main__":
                                                                             data_engine['endpoint_name']))
         if edge_status != 'RUNNING':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             ssn_hostname = GCPMeta.get_private_ip_address(data_engine['service_base_name'] + '-ssn')
             datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
                                             os.environ['conf_os_user'],
@@ -131,12 +124,12 @@ if __name__ == "__main__":
                 data_engine['service_base_name'], data_engine['endpoint_tag'], os.environ['application'])
         data_engine['notebook_primary_image_name'] = (lambda x: os.environ['notebook_primary_image_name'] if x != 'None'
         else data_engine['expected_primary_image_name'])(str(os.environ.get('notebook_primary_image_name')))
-        print('Searching pre-configured images')
+        logging.info('Searching pre-configured images')
         data_engine['primary_image_name'] = GCPMeta.get_image_by_name(data_engine['notebook_primary_image_name'])
         if data_engine['primary_image_name'] == '':
             data_engine['primary_image_name'] = os.environ['gcp_{}_image_name'.format(os.environ['conf_os_family'])]
         else:
-            print('Pre-configured primary image found. Using: {}'.format(data_engine['primary_image_name'].get('name')))
+            logging.info('Pre-configured primary image found. Using: {}'.format(data_engine['primary_image_name'].get('name')))
             data_engine['primary_image_name'] = 'global/images/{}'.format(
                 data_engine['primary_image_name'].get('name'))
 
@@ -144,7 +137,7 @@ if __name__ == "__main__":
         if data_engine['secondary_image_name'] == '':
             data_engine['secondary_image_name'] = 'None'
         else:
-            print('Pre-configured secondary image found. Using: {}'.format(
+            logging.info('Pre-configured secondary image found. Using: {}'.format(
                 data_engine['secondary_image_name'].get('name')))
             data_engine['secondary_image_name'] = 'global/images/{}'.format(
                 data_engine['secondary_image_name'].get('name'))
@@ -189,7 +182,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE MASTER NODE]')
-        print('[CREATE MASTER NODE]')
         params = "--instance_name {0} --region {1} --zone {2} --vpc_name {3} --subnet_name {4} --instance_size {5} " \
                  "--ssh_key_path {6} --initial_user {7} --service_account_name {8} --image_name {9} " \
                  "--secondary_image_name {10} --instance_class {11} --primary_disk_size {12} " \
@@ -215,7 +207,6 @@ if __name__ == "__main__":
     try:
         for i in range(data_engine['instance_count'] - 1):
             logging.info('[CREATE SLAVE NODE {}]'.format(i + 1))
-            print('[CREATE SLAVE NODE {}]'.format(i + 1))
             slave_name = data_engine['slave_node_name'] + '{}'.format(i + 1)
             params = "--instance_name {0} --region {1} --zone {2} --vpc_name {3} --subnet_name {4} " \
                      "--instance_size {5} --ssh_key_path {6} --initial_user {7} --service_account_name {8} " \
@@ -242,7 +233,7 @@ if __name__ == "__main__":
             try:
                 GCPActions.remove_instance(slave_name, data_engine['zone'])
             except:
-                print("The slave instance {} hasn't been created.".format(slave_name))
+                logging.error("The slave instance {} hasn't been created.".format(slave_name))
         GCPActions.remove_instance(data_engine['master_node_name'], data_engine['zone'])
         datalab.fab.append_result("Failed to create slave instances.", str(err))
         sys.exit(1)
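
The image lookup near the top of this file picks the user-supplied primary image name and falls back to the expected default via an inline lambda. An equivalent, more explicit spelling (the data_engine value below is a stand-in; the real one comes from the script):

    import os

    data_engine = {'expected_primary_image_name': 'default-image'}  # stand-in
    raw = str(os.environ.get('notebook_primary_image_name'))
    if raw != 'None':
        name = os.environ['notebook_primary_image_name']
    else:
        name = data_engine['expected_primary_image_name']
    data_engine['notebook_primary_image_name'] = name
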
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
index 7843592..865d846 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,7 +34,7 @@ from fabric import *
 
 
 def start_data_engine(zone, cluster_name):
-    print("Starting data engine cluster")
+    logging.info("Starting data engine cluster")
     try:
         instances = GCPMeta.get_list_instances(zone, cluster_name)
         if 'items' in instances:
@@ -46,16 +46,10 @@ def start_data_engine(zone, cluster_name):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -76,7 +70,6 @@ if __name__ == "__main__":
                                                           data_engine['computational_name'])
     try:
         logging.info('[STARTING DATA ENGINE]')
-        print('[STARTING DATA ENGINE]')
         try:
             start_data_engine(data_engine['zone'], data_engine['cluster_name'])
         except Exception as err:
@@ -88,7 +81,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATE LAST ACTIVITY TIME]')
-        print('[UPDATE LAST ACTIVITY TIME]')
         data_engine['computational_id'] = data_engine['cluster_name'] + '-m'
         data_engine['tag_name'] = data_engine['service_base_name'] + '-tag'
         data_engine['notebook_ip'] = GCPMeta.get_private_ip_address(os.environ['notebook_instance_name'])
@@ -110,7 +102,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Start Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_stop.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_stop.py
index 1a06c2d..20ee0ba 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_stop.py
@@ -25,14 +25,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def stop_data_engine(zone, cluster_name):
-    print("Stopping data engine cluster")
+    logging.info("Stopping data engine cluster")
     try:
         instances = GCPMeta.get_list_instances(zone, cluster_name)
         if 'items' in instances:
@@ -44,16 +44,10 @@ def stop_data_engine(zone, cluster_name):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -74,7 +68,6 @@ if __name__ == "__main__":
                                                           data_engine['computational_name'])
     try:
         logging.info('[STOPPING DATA ENGINE]')
-        print('[STOPPING DATA ENGINE]')
         try:
             stop_data_engine(data_engine['zone'], data_engine['cluster_name'])
         except Exception as err:
@@ -87,7 +80,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Stop Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_terminate.py
index 5751014..24516c5 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_terminate.py
@@ -25,14 +25,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_data_engine(zone, notebook_name, os_user, key_path, cluster_name):
-    print("Terminating data engine cluster")
+    logging.info("Terminating data engine cluster")
     try:
         instances = GCPMeta.get_list_instances(zone, cluster_name)
         if 'items' in instances:
@@ -42,7 +42,7 @@ def terminate_data_engine(zone, notebook_name, os_user, key_path, cluster_name):
         datalab.fab.append_result("Failed to terminate dataengine", str(err))
         sys.exit(1)
 
-    print("Removing Data Engine kernels from notebook")
+    logging.info("Removing Data Engine kernels from notebook")
     try:
         datalab.actions_lib.remove_dataengine_kernels(notebook_name, os_user, key_path, cluster_name)
     except Exception as err:
@@ -51,16 +51,10 @@ def terminate_data_engine(zone, notebook_name, os_user, key_path, cluster_name):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     if 'exploratory_name' in os.environ:
         data_engine['exploratory_name'] = os.environ['exploratory_name'].replace('_', '-').lower()
@@ -85,7 +79,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE DATA ENGINE]')
-        print('[TERMINATE DATA ENGINE]')
         try:
             terminate_data_engine(data_engine['zone'], data_engine['notebook_name'], os.environ['conf_os_user'],
                                   data_engine['key_path'], data_engine['cluster_name'])
@@ -100,7 +93,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Terminate Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
index 5f8fea4..be615de 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,12 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -105,7 +99,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -123,7 +116,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON DEEPLEARNING INSTANCE]')
-        print('[CONFIGURE PROXY ON DEEPLEARNING INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'])
@@ -157,7 +148,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
-        print('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} " \
                  "--os_user {} --jupyter_version {} " \
                  "--scala_version {} --spark_version {} " \
@@ -179,7 +169,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -197,7 +186,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -214,20 +202,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -236,7 +224,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -277,18 +264,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         #tensorboard_access_url = "https://" + edge_instance_hostname + "/{}-tensor/".format(
         #    notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['project_name']))
-        #print("TensorBoard URL: {}".format(tensorboard_url))
-        #print("TensorBoard log dir: /var/log/tensorboard")
-        print("JupyterLab URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['project_name']))
+        #logging.info("TensorBoard URL: {}".format(tensorboard_url))
+        #logging.info("TensorBoard log dir: /var/log/tensorboard")
+        logging.info("JupyterLab URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py
index ac7fbb0..c085ade 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,13 +34,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     def clear_resources():
         GCPActions.remove_instance(edge_conf['instance_name'], edge_conf['zone'])
         GCPActions.remove_static_address(edge_conf['static_address_name'], edge_conf['region'])
@@ -62,7 +55,7 @@ if __name__ == "__main__":
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         edge_conf = dict()
         edge_conf['service_base_name'] = (os.environ['conf_service_base_name'])
         edge_conf['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
@@ -155,7 +148,6 @@ if __name__ == "__main__":
             edge_conf['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             edge_conf['instance_hostname'], "/root/keys/" + os.environ['conf_key_name'] + ".pem",
             edge_conf['initial_user'], edge_conf['datalab_ssh_user'], edge_conf['sudo_group'])
@@ -171,7 +163,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING PREREQUISITES]')
         logging.info('[INSTALLING PREREQUISITES]')
         params = "--hostname {} --keyfile {} --user {} --region {}".format(
             edge_conf['instance_hostname'], edge_conf['ssh_key_path'], edge_conf['datalab_ssh_user'],
@@ -187,7 +178,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING HTTP PROXY]')
         logging.info('[INSTALLING HTTP PROXY]')
         additional_config = {"exploratory_subnet": edge_conf['private_subnet_cidr'],
                              "template_file": "/root/templates/squid.conf",
@@ -213,7 +203,6 @@ if __name__ == "__main__":
 
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": edge_conf['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -231,7 +220,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING NGINX REVERSE PROXY]')
         logging.info('[INSTALLING NGINX REVERSE PROXY]')
         edge_conf['keycloak_client_secret'] = str(uuid.uuid4())
         params = "--hostname {} --keyfile {} --user {} --keycloak_client_id {} --keycloak_client_secret {} " \
@@ -271,9 +259,9 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGRING EDGE AS NAT]')
+        logging.info('[CONFIGURING EDGE AS NAT]')
         if os.environ['edge_is_nat'] == 'true':
-            print('Installing nftables')
+            logging.info('Installing nftables')
             additional_config = {"exploratory_subnet": edge_conf['private_subnet_cidr'],
                                  "edge_ip": edge_conf['private_ip']}
             params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
@@ -298,17 +286,16 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(edge_conf['instance_name']))
-        print("Hostname: {}".format(edge_conf['instance_hostname']))
-        print("Public IP: {}".format(edge_conf['static_ip']))
-        print("Private IP: {}".format(edge_conf['private_ip']))
-        print("Key name: {}".format(edge_conf['key_name']))
-        print("Bucket name: {}".format(edge_conf['bucket_name']))
-        print("Shared bucket name: {}".format(edge_conf['shared_bucket_name']))
-        print("Notebook subnet: {}".format(edge_conf['private_subnet_cidr']))
-        print("Available GPU types: {}".format(edge_conf['gpu_types']))
+        logging.info("Instance name: {}".format(edge_conf['instance_name']))
+        logging.info("Hostname: {}".format(edge_conf['instance_hostname']))
+        logging.info("Public IP: {}".format(edge_conf['static_ip']))
+        logging.info("Private IP: {}".format(edge_conf['private_ip']))
+        logging.info("Key name: {}".format(edge_conf['key_name']))
+        logging.info("Bucket name: {}".format(edge_conf['bucket_name']))
+        logging.info("Shared bucket name: {}".format(edge_conf['shared_bucket_name']))
+        logging.info("Notebook subnet: {}".format(edge_conf['private_subnet_cidr']))
+        logging.info("Available GPU types: {}".format(edge_conf['gpu_types']))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": edge_conf['instance_hostname'],
                    "public_ip": edge_conf['static_ip'],
@@ -325,7 +312,7 @@ if __name__ == "__main__":
                    "gpu_types": edge_conf['gpu_types'],
                    "@class": "com.epam.datalab.dto.gcp.edge.EdgeInfoGcp",
                    "Action": "Create new EDGE server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_create_static_ip.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_create_static_ip.py
index 0411f7e..3564fb9 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_create_static_ip.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_create_static_ip.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--address_name', type=str, default='')
@@ -34,18 +35,12 @@ parser.add_argument('--region', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         if GCPMeta().get_static_address(args.region, args.address_name):
-            print("REQUESTED STATIC ADDRESS {} ALREADY EXISTS".format(args.address_name))
+            logging.info("REQUESTED STATIC ADDRESS {} ALREADY EXISTS".format(args.address_name))
         else:
-            print("Creating Elastic IP")
+            logging.info("Creating Elastic IP")
             GCPActions().create_static_address(args.address_name, args.region)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_start.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_start.py
index eddcfa2..2d35732 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_start.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_start.py
@@ -25,22 +25,15 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     edge_conf = dict()
     edge_conf['service_base_name'] = (os.environ['conf_service_base_name'])
     edge_conf['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
@@ -54,7 +47,6 @@ if __name__ == "__main__":
                                                                edge_conf['endpoint_name'])
 
     logging.info('[START EDGE]')
-    print('[START EDGE]')
     try:
         GCPActions.start_instance(edge_conf['instance_name'], edge_conf['zone'])
     except Exception as err:
@@ -66,19 +58,18 @@ if __name__ == "__main__":
         public_ip_address = \
             GCPMeta.get_static_address(edge_conf['region'], edge_conf['static_address_name'])['address']
         ip_address = GCPMeta.get_private_ip_address(edge_conf['instance_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(edge_conf['instance_name']))
-        print("Hostname: {}".format(instance_hostname))
-        print("Public IP: {}".format(public_ip_address))
-        print("Private IP: {}".format(ip_address))
+        logging.info("Instance name: {}".format(edge_conf['instance_name']))
+        logging.info("Hostname: {}".format(instance_hostname))
+        logging.info("Public IP: {}".format(public_ip_address))
+        logging.info("Private IP: {}".format(ip_address))
         with open("/root/result.json", 'w') as result:
             res = {"instance_name": edge_conf['instance_name'],
                    "hostname": instance_hostname,
                    "public_ip": public_ip_address,
                    "ip": ip_address,
                    "Action": "Start up notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py
index d7b2dbb..8ad3c8f 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_status.py
@@ -28,20 +28,13 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Getting statuses of DataLab resources')
+    logging.info('Getting statuses of DataLab resources')
 
     try:
-        logging.info('[COLLECT DATA]')
-        print('[COLLECTING DATA]')
+        logging.info('[COLLECTING DATA]')
         params = '--list_resources "{}"'.format(os.environ['edge_list_resources'])
         try:
             subprocess.run("~/scripts/{}.py {}".format('common_collect_data', params), shell=True, check=True)
@@ -49,6 +42,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to collect information about DataLab resources.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_stop.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_stop.py
index e8ac3f3..5490dae 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_stop.py
@@ -25,19 +25,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
     edge_conf = dict()
@@ -49,7 +42,6 @@ if __name__ == "__main__":
                                                            edge_conf['project_name'], edge_conf['endpoint_name'])
 
     logging.info('[STOP EDGE]')
-    print('[STOP EDGE]')
     try:
         GCPActions.stop_instance(edge_conf['instance_name'], edge_conf['zone'])
     except Exception as err:
@@ -60,7 +52,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"instance_name": edge_conf['instance_name'],
                    "Action": "Stop edge server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/edge_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/edge_terminate.py
index d92b697..b0cf36f 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/edge_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/edge_terminate.py
@@ -25,14 +25,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
 
 
 def terminate_edge_node(user_name, service_base_name, region, zone, project_name, endpoint_name):
-    print("Terminating Dataengine-service clusters")
+    logging.info("Terminating Dataengine-service clusters")
     try:
         labels = [
             {'sbn': service_base_name},
@@ -42,14 +42,14 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         if clusters_list:
             for cluster_name in clusters_list:
                 GCPActions.delete_dataproc_cluster(cluster_name, region)
-                print('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
+                logging.info('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataproc", str(err))
         sys.exit(1)
 
-    print("Terminating EDGE and notebook instances")
+    logging.info("Terminating EDGE and notebook instances")
     base = '{}-{}-{}'.format(service_base_name, project_name, endpoint_name)
     keys = ['edge', 'ps', 'static-ip', 'bucket', 'subnet']
     targets = ['{}-{}'.format(base, k) for k in keys]
@@ -63,7 +63,7 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         datalab.fab.append_result("Failed to terminate instances", str(err))
         sys.exit(1)
 
-    print("Removing static addresses")
+    logging.info("Removing static addresses")
     try:
         static_addresses = GCPMeta.get_list_static_addresses(region, base)
         if 'items' in static_addresses:
@@ -74,7 +74,7 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         datalab.fab.append_result("Failed to remove static IPs", str(err))
         sys.exit(1)
 
-    print("Removing storage bucket")
+    logging.info("Removing storage bucket")
     try:
         buckets = GCPMeta.get_list_buckets(base)
         if 'items' in buckets:
@@ -85,7 +85,7 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         datalab.fab.append_result("Failed to remove buckets", str(err))
         sys.exit(1)
 
-    print("Removing firewalls")
+    logging.info("Removing firewalls")
     try:
         firewalls = GCPMeta.get_list_firewalls(base)
         if 'items' in firewalls:
@@ -96,7 +96,7 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         datalab.fab.append_result("Failed to remove security groups", str(err))
         sys.exit(1)
 
-    print("Removing Service accounts and roles")
+    logging.info("Removing Service accounts and roles")
     try:
         list_service_accounts = GCPMeta.get_list_service_accounts()
         for service_account in (set(targets) & set(list_service_accounts)):
@@ -110,7 +110,7 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
         datalab.fab.append_result("Failed to remove service accounts and roles", str(err))
         sys.exit(1)
 
-    print("Removing subnets")
+    logging.info("Removing subnets")
     try:
         list_subnets = GCPMeta.get_list_subnetworks(region, '', base)
         if 'items' in list_subnets:
@@ -126,17 +126,10 @@ def terminate_edge_node(user_name, service_base_name, region, zone, project_name
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     edge_conf = dict()
     edge_conf['service_base_name'] = (os.environ['conf_service_base_name'])
     edge_conf['edge_user_name'] = (os.environ['edge_user_name'])
@@ -147,7 +140,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE EDGE]')
-        print('[TERMINATE EDGE]')
         try:
             terminate_edge_node(edge_conf['edge_user_name'], edge_conf['service_base_name'],
                                 edge_conf['region'], edge_conf['zone'], edge_conf['project_name'],
@@ -164,7 +156,7 @@ if __name__ == "__main__":
             res = {"service_base_name": edge_conf['service_base_name'],
                    "user_name": edge_conf['edge_user_name'],
                    "Action": "Terminate edge node"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
index e27f16b..9a85703 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,12 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -105,7 +99,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -123,7 +116,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
-        print('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -158,7 +149,6 @@ if __name__ == "__main__":
     # installing and configuring jupiter and all dependencies
     try:
         logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
-        print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} " \
                  "--region {} --spark_version {} " \
                  "--hadoop_version {} --os_user {} " \
@@ -181,7 +171,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -199,7 +188,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -216,20 +204,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -239,7 +227,7 @@ if __name__ == "__main__":
 
     if os.environ['gpu_enabled'] == 'True':
         try:
-            print('[INSTALLING GPU DRIVERS]')
+            logging.info('[INSTALLING GPU DRIVERS]')
             params = "--hostname {} --keyfile {} --os_user {}".format(
                 instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'])
             try:
@@ -254,7 +242,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -291,18 +278,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print("ReverseProxyNotebook".format(jupyter_notebook_access_url))
-        print("ReverseProxyUngit".format(jupyter_ungit_access_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info("ReverseProxyNotebook".format(jupyter_notebook_access_url))
+        logging.info("ReverseProxyUngit".format(jupyter_ungit_access_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
index d1f1db1..100999a 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -33,12 +33,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -105,7 +99,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -123,7 +116,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON JUPYTERLAB INSTANCE]')
-        print('[CONFIGURE PROXY ON JUPYTERLAB INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO JUPYTERLAB NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO JUPYTERLAB NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -158,7 +149,6 @@ if __name__ == "__main__":
     # installing and configuring jupiter and all dependencies
     try:
         logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
-        print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --edge_ip {} " \
                  "--region {} --spark_version {} " \
                  "--hadoop_version {} --os_user {} " \
@@ -179,7 +169,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -197,7 +186,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -214,20 +202,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -236,7 +224,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -265,7 +252,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGURING PROXY FOR DOCKER]')
         logging.info('[CONFIGURING PROXY FOR DOCKER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -285,7 +271,6 @@ if __name__ == "__main__":
 
 
     try:
-        print('[STARTING JUPYTER CONTAINER]')
         logging.info('[STARTING JUPYTER CONTAINER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -312,18 +297,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_acces_url = "http://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("JupyterLab URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print("ReverseProxyNotebook".format(jupyter_notebook_acces_url))
-        print("ReverseProxyUngit".format(jupyter_ungit_acces_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("JupyterLab URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info("ReverseProxyNotebook".format(jupyter_notebook_acces_url))
+        logging.info("ReverseProxyUngit".format(jupyter_ungit_acces_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
index 91b5d32..d5a06c9 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,16 +34,10 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/project/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         project_conf = dict()
         project_conf['edge_unique_index'] = str(uuid.uuid4())[:5]
         project_conf['ps_unique_index'] = str(uuid.uuid4())[:5]
@@ -134,12 +128,12 @@ if __name__ == "__main__":
                 subprocess.run('echo "{0}" >> {1}{2}.pub'.format(project_conf['user_key'], os.environ['conf_key_dir'],
                                                         project_conf['project_name']), shell=True, check=True)
             except:
-                print("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
+                logging.info("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
         except KeyError:
-            print("ADMINSs PUBLIC KEY DOES NOT UPLOADED")
+            logging.info("ADMINSs PUBLIC KEY DOES NOT UPLOADED")
             sys.exit(1)
 
-        print("Will create exploratory environment with edge node as access point as following: ".format(json.dumps(
+        logging.info("Will create exploratory environment with edge node as access point as following: ".format(json.dumps(
             project_conf, sort_keys=True, indent=4, separators=(',', ': '))))
         logging.info(json.dumps(project_conf))
     except Exception as err:
@@ -148,7 +142,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SUBNET]')
-        print('[CREATE SUBNET]')
         params = "--subnet_name {} --region {} --vpc_selflink {} --prefix {} --vpc_cidr {} --user_subnets_range '{}'" \
                  .format(project_conf['private_subnet_name'], project_conf['region'], project_conf['vpc_selflink'],
                          project_conf['private_subnet_prefix'], project_conf['vpc_cidr'],
@@ -164,15 +157,14 @@ if __name__ == "__main__":
         try:
             GCPActions.remove_subnet(project_conf['private_subnet_name'], project_conf['region'])
         except:
-            print("Subnet hasn't been created.")
+            logging.info("Subnet hasn't been created.")
         datalab.fab.append_result("Failed to create subnet.", str(err))
         sys.exit(1)
 
-    print('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
+    logging.info('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
 
     try:
         logging.info('[CREATE SERVICE ACCOUNT AND ROLE FOR EDGE NODE]')
-        print('[CREATE SERVICE ACCOUNT AND ROLE FOR EDGE NODE]')
         params = "--service_account_name {} --role_name {} --unique_index {} --service_base_name {}".format(
             project_conf['edge_service_account_name'], project_conf['edge_role_name'],
             project_conf['edge_unique_index'], project_conf['service_base_name'])
@@ -188,14 +180,13 @@ if __name__ == "__main__":
                                               project_conf['service_base_name'])
             GCPActions.remove_role(project_conf['edge_role_name'])
         except:
-            print("Service account or role hasn't been created")
+            logging.info("Service account or role hasn't been created")
         GCPActions.remove_subnet(project_conf['private_subnet_name'], project_conf['region'])
         datalab.fab.append_result("Failed to creating service account and role.", str(err))
         sys.exit(1)
 
     try:
         logging.info('[CREATE SERVICE ACCOUNT AND ROLE FOR PRIVATE SUBNET]')
-        print('[CREATE SERVICE ACCOUNT AND ROLE FOR NOTEBOOK NODE]')
         params = "--service_account_name {} --role_name {} --policy_path {} --roles_path {} --unique_index {} " \
                  "--service_base_name {}".format(
                   project_conf['ps_service_account_name'], project_conf['ps_role_name'], project_conf['ps_policy_path'],
@@ -212,7 +203,7 @@ if __name__ == "__main__":
                                               project_conf['service_base_name'])
             GCPActions.remove_role(project_conf['ps_role_name'])
         except:
-            print("Service account or role hasn't been created")
+            logging.info("Service account or role hasn't been created")
         GCPActions.remove_service_account(project_conf['edge_service_account_name'],
                                           project_conf['service_base_name'])
         GCPActions.remove_role(project_conf['edge_role_name'])
@@ -222,7 +213,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE FIREWALL FOR EDGE NODE]')
-        print('[CREATE FIREWALL FOR EDGE NODE]')
         firewall_rules = dict()
         firewall_rules['ingress'] = []
         firewall_rules['egress'] = []
@@ -322,7 +312,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE FIREWALL FOR PRIVATE SUBNET]')
-        print('[CREATE FIREWALL FOR PRIVATE SUBNET]')
         firewall_rules = dict()
         firewall_rules['ingress'] = []
         firewall_rules['egress'] = []
@@ -404,7 +393,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE BUCKETS]')
-        print('[CREATE BUCKETS]')
         project_conf['shared_bucket_tags'] = {
             project_conf['tag_name']: project_conf['shared_bucket_name'],
             "endpoint_tag": project_conf['endpoint_tag'],
@@ -453,7 +441,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SET PERMISSIONS FOR USER AND SHARED BUCKETS]')
-        print('[SET PERMISSIONS FOR USER AND SHARED BUCKETS]')
         GCPActions.set_bucket_owner(project_conf['bucket_name'], project_conf['ps_service_account_name'],
                                     project_conf['service_base_name'])
         GCPActions.set_bucket_owner(project_conf['shared_bucket_name'], project_conf['ps_service_account_name'],
@@ -478,7 +465,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING STATIC IP ADDRESS]')
-        print('[CREATING STATIC IP ADDRESS]')
         params = "--address_name {} --region {}".format(project_conf['static_address_name'], project_conf['region'])
         try:
             subprocess.run("~/scripts/{}.py {}".format('edge_create_static_ip', params), shell=True, check=True)
@@ -490,7 +476,7 @@ if __name__ == "__main__":
         try:
             GCPActions.remove_static_address(project_conf['static_address_name'], project_conf['region'])
         except:
-            print("Static IP address hasn't been created.")
+            logging.info("Static IP address hasn't been created.")
         GCPActions.remove_bucket(project_conf['bucket_name'])
         GCPActions.remove_firewall(project_conf['fw_edge_ingress_public'])
         GCPActions.remove_firewall(project_conf['fw_edge_ingress_internal'])
@@ -518,7 +504,6 @@ if __name__ == "__main__":
         project_conf['static_ip'] = \
             GCPMeta.get_static_address(project_conf['region'], project_conf['static_address_name'])['address']
         logging.info('[CREATE EDGE INSTANCE]')
-        print('[CREATE EDGE INSTANCE]')
         params = "--instance_name {} --region {} --zone {} --vpc_name {} --subnet_name {} --instance_size {} " \
                  "--ssh_key_path {} --initial_user {} --service_account_name {} --image_name {} --instance_class {} " \
                  "--static_ip {} --network_tag {} --labels '{}' --service_base_name {}".format(
@@ -554,7 +539,6 @@ if __name__ == "__main__":
     if os.environ['edge_is_nat'] == 'true':
         try:
             logging.info('[CREATE NAT ROUTE]')
-            print('[REATE NAT ROUTE]')
             nat_route_name = '{0}-{1}-{2}-nat-route'.format(project_conf['service_base_name'],
                                                                   project_conf['project_name'],
                                                                   project_conf['endpoint_name'])
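Note: project_prepare.py follows a compensating-cleanup convention: each step's
except block tears down whatever the step may have partially created, records
the failure, and exits non-zero, so a failed project does not leave stray GCP
resources behind. The shape, reduced to stubs (create_step/undo_step are
illustrative; append_result is replaced by logging.error to keep the sketch
self-contained):

    import logging
    import sys

    logging.basicConfig(level=logging.INFO)

    def provision_step(create_step, undo_step, what):
        try:
            create_step()
        except Exception as err:
            try:
                undo_step()
            except Exception:
                logging.info("%s hasn't been created.", what)
            logging.error("Failed to create %s: %s", what, err)
            sys.exit(1)

    provision_step(lambda: None, lambda: None, "static IP address")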
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/project_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/project_terminate.py
index 7e0dbfc..144ec05 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/project_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/project_terminate.py
@@ -25,7 +25,7 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import requests
 import sys
@@ -33,7 +33,7 @@ import traceback
 
 
 def terminate_edge_node(endpoint_name, project_name, service_base_name, region, zone):
-    print("Terminating Dataengine-service clusters")
+    logging.info("Terminating Dataengine-service clusters")
     try:
         labels = [
             {'sbn': service_base_name},
@@ -43,14 +43,14 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         if clusters_list:
             for cluster_name in clusters_list:
                 GCPActions.delete_dataproc_cluster(cluster_name, region)
-                print('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
+                logging.info('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate dataengine-service", str(err))
         sys.exit(1)
 
-    print("Terminating EDGE and notebook instances")
+    logging.info("Terminating EDGE and notebook instances")
     base = '{}-{}-{}'.format(service_base_name, project_name, endpoint_name)
     keys = ['edge', 'ps', 'static-ip', 'bucket', 'subnet']
     targets = ['{}-{}'.format(base, k) for k in keys]
@@ -64,7 +64,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to terminate instances", str(err))
         sys.exit(1)
 
-    print("Removing static addresses")
+    logging.info("Removing static addresses")
     try:
         static_addresses = GCPMeta.get_list_static_addresses(region, base)
         if 'items' in static_addresses:
@@ -75,7 +75,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to remove static addresses", str(err))
         sys.exit(1)
 
-    print("Removing storage bucket")
+    logging.info("Removing storage bucket")
     try:
         buckets = GCPMeta.get_list_buckets(base)
         if 'items' in buckets:
@@ -86,7 +86,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to remove storage buckets", str(err))
         sys.exit(1)
 
-    print("Removing project specific images")
+    logging.info("Removing project specific images")
     try:
         project_image_name_beginning = '{}-{}-{}'.format(service_base_name, project_name, endpoint_name)
         images = GCPMeta.get_list_images(project_image_name_beginning)
@@ -94,10 +94,10 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
             for i in images['items']:
                 GCPActions.remove_image(i['name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing firewalls")
+    logging.info("Removing firewalls")
     try:
         firewalls = GCPMeta.get_list_firewalls(base)
         if 'items' in firewalls:
@@ -108,7 +108,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to remove security groups", str(err))
         sys.exit(1)
 
-    print("Removing Service accounts and roles")
+    logging.info("Removing Service accounts and roles")
     try:
         list_service_accounts = GCPMeta.get_list_service_accounts()
         sa_keys = ['edge-sa', 'ps-sa']
@@ -125,7 +125,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to remove service accounts and roles", str(err))
         sys.exit(1)
 
-    print("Removing subnets")
+    logging.info("Removing subnets")
     try:
         list_subnets = GCPMeta.get_list_subnetworks(region, '', base)
         if 'items' in list_subnets:
@@ -139,7 +139,7 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
         datalab.fab.append_result("Failed to remove subnets", str(err))
         sys.exit(1)
 
-    print("Removing nat route")
+    logging.info("Removing nat route")
     try:
         nat_route_name = '{0}-{1}-{2}-nat-route'.format(service_base_name, project_name, endpoint_name)
         route = GCPMeta.get_route(nat_route_name)
@@ -151,17 +151,10 @@ def terminate_edge_node(endpoint_name, project_name, service_base_name, region,
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/project/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     project_conf = dict()
     project_conf['service_base_name'] = (os.environ['conf_service_base_name'])
     project_conf['project_name'] = (os.environ['project_name']).replace('_', '-').lower()
@@ -172,7 +165,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE EDGE]')
-        print('[TERMINATE EDGE]')
         try:
             terminate_edge_node(project_conf['endpoint_name'], project_conf['project_name'],
                                 project_conf['service_base_name'],
@@ -181,11 +173,10 @@ if __name__ == "__main__":
             traceback.print_exc()
             datalab.fab.append_result("Failed to terminate edge.", str(err))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
     try:
-        print('[KEYCLOAK PROJECT CLIENT DELETE]')
         logging.info('[KEYCLOAK PROJECT CLIENT DELETE]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             os.environ['keycloak_auth_server_url'])
@@ -220,14 +211,14 @@ if __name__ == "__main__":
                                           headers={"Authorization": "Bearer " + keycloak_token.get("access_token"),
                                                    "Content-Type": "application/json"})
     except Exception as err:
-        print("Failed to remove project client from Keycloak", str(err))
+        logging.error("Failed to remove project client from Keycloak", str(err))
 
     try:
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": project_conf['service_base_name'],
                    "project_name": project_conf['project_name'],
                    "Action": "Terminate project"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
index 3991d50..dae62df 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -108,7 +102,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -126,7 +119,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON RSTUDIO INSTANCE]')
-        print('[CONFIGURE PROXY ON RSTUDIO INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -145,7 +137,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO RSTUDIO NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -162,7 +153,6 @@ if __name__ == "__main__":
     # installing and configuring RStudio and all dependencies
     try:
         logging.info('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
-        print('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {0}  --keyfile {1} " \
                  "--region {2} --rstudio_pass {3} " \
                  "--rstudio_version {4} --os_user {5} " \
@@ -183,7 +173,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -201,7 +190,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -218,20 +206,20 @@ if __name__ == "__main__":
         
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -240,7 +228,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -264,7 +251,7 @@ if __name__ == "__main__":
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         datalab.fab.append_result("Failed to set edge reverse proxy template.", str(err))
         GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
         sys.exit(1)
@@ -278,18 +265,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         rstudio_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("Rstudio URL: {}".format(rstudio_ip_url))
-        print("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
-        print("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("Rstudio URL: {}".format(rstudio_ip_url))
+        logging.info("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
+        logging.info("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py
index 11e5283..d326408 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_dataengine-service_create_configs.py
@@ -29,6 +29,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -62,7 +63,7 @@ def configure_rstudio():
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
             subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine-service_ensured', shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
@@ -76,7 +77,7 @@ def configure_rstudio():
             subprocess.run('echo \'HADOOP_CONF_DIR="' + yarn_dir + '"\' >> /home/' + args.os_user + '/.Renviron', shell=True, check=True)
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
         except Exception as err:
-            print('Error:', str(err))
+            logging.error('Error: {}'.format(str(err)))
             sys.exit(1)
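Note on call signatures: stdlib logging treats extra positional arguments as
%-format parameters, so passing str(err) after a message that has no placeholder
(e.g. logging.error('Error:', str(err))) makes the logging machinery raise a
formatting error instead of emitting the intended message. The safe forms:

    import logging
    logging.basicConfig(level=logging.INFO)

    try:
        raise RuntimeError("boom")
    except Exception as err:
        logging.error('Error: %s', err)                # lazy %-formatting
        logging.error('Error: {}'.format(err))         # eager str.format, as used above
        logging.exception('Error while configuring')   # error() plus traceback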
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
index 0afb38b..31e0876 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -39,12 +39,6 @@ parser.add_argument('--ssn_unique_index', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     def clear_resources():
         GCPActions.remove_instance(ssn_conf['instance_name'], ssn_conf['zone'])
         GCPActions.remove_static_address(ssn_conf['static_address_name'], ssn_conf['region'])
@@ -63,7 +57,6 @@ if __name__ == "__main__":
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
         logging.info('[DERIVING NAMES]')
-        print('[DERIVING NAMES]')
         ssn_conf = dict()
         ssn_conf['instance'] = 'ssn'
         ssn_conf['pre_defined_vpc'] = False
@@ -149,7 +142,6 @@ if __name__ == "__main__":
             ssn_conf['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             ssn_conf['instance_hostname'], ssn_conf['ssh_key_path'], ssn_conf['initial_user'],
             ssn_conf['datalab_ssh_user'], ssn_conf['sudo_group'])
@@ -166,7 +158,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING PREREQUISITES TO SSN INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO SSN INSTANCE]')
         params = "--hostname {} --keyfile {} --pip_packages " \
                  "'boto3 bcrypt==3.1.7 backoff argparse fabric awscli pymongo pyyaml " \
                  "google-api-python-client google-cloud-storage pycryptodome' --user {} --region {}". \
@@ -185,7 +176,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE SSN INSTANCE]')
-        print('[CONFIGURE SSN INSTANCE]')
         additional_config = {"nginx_template_dir": "/root/templates/",
                              "service_base_name": ssn_conf['service_base_name'],
                              "security_group_id": ssn_conf['firewall_name'], "vpc_id": ssn_conf['vpc_name'],
@@ -208,7 +198,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURING DOCKER AT SSN INSTANCE]')
-        print('[CONFIGURING DOCKER AT SSN INSTANCE]')
         additional_config = [{"name": "base", "tag": "latest"},
                              {"name": "project", "tag": "latest"},
                              {"name": "edge", "tag": "latest"},
@@ -240,7 +229,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE SSN INSTANCE UI]')
-        print('[CONFIGURE SSN INSTANCE UI]')
 
         cloud_params = [
             {
@@ -532,7 +520,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     logging.info('[CREATE KEYCLOAK CLIENT]')
-    print('[CREATE KEYCLOAK CLIENT]')
     keycloak_params = "--service_base_name {} --keycloak_auth_server_url {} --keycloak_realm_name {} " \
                       "--keycloak_user {} --keycloak_user_password {} --instance_public_ip {} --keycloak_client_secret {} " \
         .format(ssn_conf['service_base_name'], os.environ['keycloak_auth_server_url'],
@@ -548,29 +535,28 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SUMMARY]')
-        print('[SUMMARY]')
-        print("Service base name: {}".format(ssn_conf['service_base_name']))
-        print("SSN Name: {}".format(ssn_conf['instance_name']))
-        print("SSN Hostname: {}".format(ssn_conf['instance_hostname']))
-        print("Role name: {}".format(ssn_conf['role_name']))
-        print("Key name: {}".format(os.environ['conf_key_name']))
-        print("VPC Name: {}".format(ssn_conf['vpc_name']))
-        print("Subnet Name: {}".format(ssn_conf['subnet_name']))
-        print("Firewall Names: {}".format(ssn_conf['firewall_name']))
-        print("SSN instance size: {}".format(ssn_conf['instance_size']))
-        print("SSN AMI name: {}".format(ssn_conf['image_name']))
-        print("Region: {}".format(ssn_conf['region']))
+        logging.info("Service base name: {}".format(ssn_conf['service_base_name']))
+        logging.info("SSN Name: {}".format(ssn_conf['instance_name']))
+        logging.info("SSN Hostname: {}".format(ssn_conf['instance_hostname']))
+        logging.info("Role name: {}".format(ssn_conf['role_name']))
+        logging.info("Key name: {}".format(os.environ['conf_key_name']))
+        logging.info("VPC Name: {}".format(ssn_conf['vpc_name']))
+        logging.info("Subnet Name: {}".format(ssn_conf['subnet_name']))
+        logging.info("Firewall Names: {}".format(ssn_conf['firewall_name']))
+        logging.info("SSN instance size: {}".format(ssn_conf['instance_size']))
+        logging.info("SSN AMI name: {}".format(ssn_conf['image_name']))
+        logging.info("Region: {}".format(ssn_conf['region']))
         jenkins_url = "http://{}/jenkins".format(ssn_conf['instance_hostname'])
         jenkins_url_https = "https://{}/jenkins".format(ssn_conf['instance_hostname'])
-        print("Jenkins URL: {}".format(jenkins_url))
-        print("Jenkins URL HTTPS: {}".format(jenkins_url_https))
-        print("DataLab UI HTTP URL: http://{}".format(ssn_conf['instance_hostname']))
-        print("DataLab UI HTTPS URL: https://{}".format(ssn_conf['instance_hostname']))
+        logging.info("Jenkins URL: {}".format(jenkins_url))
+        logging.info("Jenkins URL HTTPS: {}".format(jenkins_url_https))
+        logging.info("DataLab UI HTTP URL: http://{}".format(ssn_conf['instance_hostname']))
+        logging.info("DataLab UI HTTPS URL: https://{}".format(ssn_conf['instance_hostname']))
         try:
             with open('jenkins_creds.txt') as f:
-                print(f.read())
+                logging.info(f.read())
         except:
-            print("Jenkins is either configured already or have issues in configuration routine.")
+            logging.info("Jenkins is either configured already or have issues in configuration routine.")
 
         with open("/root/result.json", 'w') as f:
             res = {"service_base_name": ssn_conf['service_base_name'],
@@ -586,7 +572,7 @@ if __name__ == "__main__":
                    "action": "Create SSN instance"}
             f.write(json.dumps(res))
 
-        print('Upload response file')
+        logging.info('Upload response file')
         params = "--instance_name {} --local_log_filepath {} --os_user {} --instance_hostname {}". \
             format(ssn_conf['instance_name'], local_log_filepath, ssn_conf['datalab_ssh_user'],
                    ssn_conf['instance_hostname'])
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_static_ip.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_static_ip.py
index 58bf787..3564fb9 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_static_ip.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_static_ip.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--address_name', type=str, default='')
@@ -34,17 +35,12 @@ parser.add_argument('--region', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         if GCPMeta().get_static_address(args.region, args.address_name):
-            print("REQUESTED STATIC ADDRESS {} ALREADY EXISTS".format(args.address_name))
+            logging.info("REQUESTED STATIC ADDRESS {} ALREADY EXISTS".format(args.address_name))
         else:
-            print("Creating Elastic IP")
+            logging.info("Creating Elastic IP")
             GCPActions().create_static_address(args.address_name, args.region)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_vpc.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_vpc.py
index d6bd97d..a453b36 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_vpc.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_create_vpc.py
@@ -24,6 +24,7 @@
 import argparse
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--vpc_name', type=str, default='')
@@ -32,10 +33,10 @@ args = parser.parse_args()
 if __name__ == "__main__":
     if args.vpc_name != '':
         if GCPMeta().get_vpc(args.vpc_name):
-            print("REQUESTED VPC {} ALREADY EXISTS".format(args.vpc_name))
+            logging.info("REQUESTED VPC {} ALREADY EXISTS".format(args.vpc_name))
         else:
-            print("Creating VPC {}".format(args.vpc_name))
+            logging.info("Creating VPC {}".format(args.vpc_name))
             GCPActions().create_vpc(args.vpc_name)
     else:
-        print("VPC name can't be empty.")
+        logging.error("VPC name can't be empty.")
         sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_finalize.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_finalize.py
index 93761f1..a29d398 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_finalize.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_finalize.py
@@ -25,6 +25,7 @@ import argparse
 import boto3
 import sys
 from datalab.ssn_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--key_id', type=str, default='')
@@ -37,7 +38,7 @@ def cleanup(key_id):
         current_user = iam.CurrentUser()
         for user_key in current_user.access_keys.all():
             if user_key.id == key_id:
-                print("Deleted key {}".format(user_key.id))
+                logging.info("Deleted key {}".format(user_key.id))
                 user_key.delete()
         return True
     except:
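
One thing the message swap above does not capture: the bare "except:" in
cleanup() discards the traceback. The stdlib logger imported at the top of this
file can record it with logging.exception(), which logs at ERROR level and
appends the active traceback. A hypothetical rewrite of cleanup(), shown only
as a sketch:

    def cleanup(key_id):
        try:
            ...  # iterate current_user.access_keys.all() and delete the match, as above
            return True
        except Exception:
            # logging.exception must be called from inside an except block.
            logging.exception("Failed to delete access key {}".format(key_id))
            return False
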
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py
index f8a269c..59f0658 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_prepare.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -38,11 +38,6 @@ parser.add_argument('--ssn_unique_index', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -52,7 +47,6 @@ if __name__ == "__main__":
         ssn_conf['pre_defined_subnet'] = False
         ssn_conf['pre_defined_firewall'] = False
         logging.info('[DERIVING NAMES]')
-        print('[DERIVING NAMES]')
         ssn_conf['ssn_unique_index'] = args.ssn_unique_index
         ssn_conf['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'].replace('_', '-').lower()[:20], '-', True)
@@ -95,7 +89,6 @@ if __name__ == "__main__":
     except KeyError:
         try:
             logging.info('[CREATE VPC]')
-            print('[CREATE VPC]')
             params = "--vpc_name {}".format(ssn_conf['vpc_name'])
             try:
                 subprocess.run("~/scripts/{}.py {}".format('ssn_create_vpc', params), shell=True, check=True)
@@ -109,7 +102,7 @@ if __name__ == "__main__":
                 try:
                     GCPActions.remove_vpc(ssn_conf['vpc_name'])
                 except:
-                    print("VPC hasn't been created.")
+                    logging.error("VPC hasn't been created.")
             sys.exit(1)
 
     try:
@@ -122,7 +115,6 @@ if __name__ == "__main__":
     except KeyError:
         try:
             logging.info('[CREATE SUBNET]')
-            print('[CREATE SUBNET]')
             params = "--subnet_name {} --region {} --vpc_selflink {} --prefix {} --vpc_cidr {} --ssn {}".\
                 format(ssn_conf['subnet_name'], ssn_conf['region'], ssn_conf['vpc_selflink'], ssn_conf['subnet_prefix'],
                        ssn_conf['vpc_cidr'], True)
@@ -138,7 +130,7 @@ if __name__ == "__main__":
                 try:
                     GCPActions.remove_subnet(ssn_conf['subnet_name'], ssn_conf['region'])
                 except:
-                    print("Subnet hasn't been created.")
+                    logging.error("Subnet hasn't been created.")
             if not ssn_conf['pre_defined_vpc']:
                 GCPActions.remove_vpc(ssn_conf['vpc_name'])
             sys.exit(1)
@@ -153,7 +145,6 @@ if __name__ == "__main__":
     except KeyError:
         try:
             logging.info('[CREATE FIREWALL]')
-            print('[CREATE FIREWALL]')
             if os.environ['conf_allowed_ip_cidr'] != '0.0.0.0/0':
                 ssn_conf['allowed_ip_cidr'] = ssn_conf['allowed_ip_cidr'].split(', ')
             else:
@@ -208,7 +199,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SERVICE ACCOUNT AND ROLE]')
-        print('[CREATE SERVICE ACCOUNT AND ROLE]')
         params = "--service_account_name {} --role_name {} --policy_path {} --roles_path {} --unique_index {} " \
                  "--service_base_name {}".format( ssn_conf['service_account_name'], ssn_conf['role_name'],
                                                   ssn_conf['ssn_policy_path'], ssn_conf['ssn_roles_path'],
@@ -224,7 +214,7 @@ if __name__ == "__main__":
             GCPActions.remove_service_account(ssn_conf['service_account_name'], ssn_conf['service_base_name'])
             GCPActions.remove_role(ssn_conf['role_name'])
         except:
-            print("Service account hasn't been created")
+            logging.error("Service account hasn't been created")
         if not ssn_conf['pre_defined_firewall']:
             GCPActions.remove_firewall('{}-ingress'.format(ssn_conf['firewall_name']))
             GCPActions.remove_firewall('{}-egress'.format(ssn_conf['firewall_name']))
@@ -236,7 +226,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING STATIC IP ADDRESS]')
-        print('[CREATING STATIC IP ADDRESS]')
         params = "--address_name {} --region {}".format(ssn_conf['static_address_name'], ssn_conf['region'])
         try:
             subprocess.run("~/scripts/{}.py {}".format('ssn_create_static_ip', params), shell=True, check=True)
@@ -248,7 +237,7 @@ if __name__ == "__main__":
         try:
             GCPActions.remove_static_address(ssn_conf['static_address_name'], ssn_conf['region'])
         except:
-            print("Static IP address hasn't been created.")
+            logging.error("Static IP address hasn't been created.")
         GCPActions.remove_service_account(ssn_conf['service_account_name'], ssn_conf['service_base_name'])
         GCPActions.remove_role(ssn_conf['role_name'])
         GCPActions.remove_bucket(ssn_conf['ssn_bucket_name'])
@@ -273,7 +262,6 @@ if __name__ == "__main__":
         ssn_conf['static_ip'] = GCPMeta.get_static_address(ssn_conf['region'],
                                                            ssn_conf['static_address_name'])['address']
         logging.info('[CREATE SSN INSTANCE]')
-        print('[CREATE SSN INSTANCE]')
         params = "--instance_name {0} --region {1} --zone {2} --vpc_name {3} --subnet_name {4} --instance_size {5}"\
                  " --ssh_key_path {6} --initial_user {7} --service_account_name {8} --image_name {9}"\
                  " --instance_class {10} --static_ip {11} --network_tag {12} --labels '{13}' " \
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py
index e703df4..6f6c9cd 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate.py
@@ -23,7 +23,7 @@
 
 import datalab.ssn_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -31,13 +31,8 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     ssn_conf = dict()
     ssn_conf['service_base_name'] = datalab.fab.replace_multi_symbols(
         os.environ['conf_service_base_name'].replace('_', '-').lower()[:20], '-', True)
@@ -55,7 +50,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE SSN]')
-        print('[TERMINATE SSN]')
         params = "--service_base_name {} --region {} --zone {} --pre_defined_vpc {} --vpc_name {}".format(
             ssn_conf['service_base_name'], ssn_conf['region'], ssn_conf['zone'], pre_defined_vpc, ssn_conf['vpc_name'])
         try:
@@ -68,7 +62,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[KEYCLOAK SSN CLIENT DELETE]')
         logging.info('[KEYCLOAK SSN CLIENT DELETE]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(os.environ['keycloak_auth_server_url'])
         keycloak_client_url = '{0}/admin/realms/{1}/clients'.format(os.environ['keycloak_auth_server_url'],
@@ -87,13 +80,13 @@ if __name__ == "__main__":
         keycloak_client = requests.delete(keycloak_client_delete_url, headers={"Authorization": "Bearer {}"
                                           .format(keycloak_token.get("access_token")), "Content-Type": "application/json"})
     except Exception as err:
-        print("Failed to remove ssn client from Keycloak", str(err))
+        logging.error("Failed to remove ssn client from Keycloak", str(err))
 
     try:
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": ssn_conf['service_base_name'],
                    "Action": "Terminate ssn with all service_base_name environment"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate_gcp_resources.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate_gcp_resources.py
index 5464ec3..c0938f6 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate_gcp_resources.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_terminate_gcp_resources.py
@@ -26,6 +26,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
 from datalab.ssn_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--zone', type=str)
@@ -43,7 +44,7 @@ args = parser.parse_args()
 if __name__ == "__main__":
     GCPMeta = datalab.meta_lib.GCPMeta()
     GCPActions = datalab.actions_lib.GCPActions()
-    print("Terminating Dataengine-service clusters")
+    logging.info("Terminating Dataengine-service clusters")
     try:
         labels = [
             {'sbn': args.service_base_name}
@@ -52,54 +53,54 @@ if __name__ == "__main__":
         if clusters_list:
             for cluster_name in clusters_list:
                 GCPActions.delete_dataproc_cluster(cluster_name, args.region)
-                print('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
+                logging.info('The Dataproc cluster {} has been terminated successfully'.format(cluster_name))
         else:
-            print("There are no Dataproc clusters to terminate.")
+            logging.info("There are no Dataproc clusters to terminate.")
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Terminating instances")
+    logging.info("Terminating instances")
     try:
         instances = GCPMeta.get_list_instances(args.zone, args.service_base_name)
         if 'items' in instances:
             for i in instances['items']:
                 GCPActions.remove_instance(i['name'], args.zone)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing images")
+    logging.info("Removing images")
     try:
         images = GCPMeta.get_list_images(args.service_base_name)
         if 'items' in images:
             for i in images['items']:
                 GCPActions.remove_image(i['name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing static addresses")
+    logging.info("Removing static addresses")
     try:
         static_addresses = GCPMeta.get_list_static_addresses(args.region, args.service_base_name)
         if 'items' in static_addresses:
             for i in static_addresses['items']:
                 GCPActions.remove_static_address(i['name'], args.region)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing firewalls")
+    logging.info("Removing firewalls")
     try:
         firewalls = GCPMeta.get_list_firewalls(args.service_base_name)
         if 'items' in firewalls:
             for i in firewalls['items']:
                 GCPActions.remove_firewall(i['name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing Service accounts and roles")
+    logging.info("Removing Service accounts and roles")
     try:
         list_service_accounts = GCPMeta.get_list_service_accounts()
         for service_account in list_service_accounts:
@@ -110,10 +111,10 @@ if __name__ == "__main__":
             if role.startswith(args.service_base_name):
                 GCPActions.remove_role(role)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing subnets")
+    logging.info("Removing subnets")
     try:
         list_subnets = GCPMeta.get_list_subnetworks(args.region, '', args.service_base_name)
         if 'items' in list_subnets:
@@ -122,26 +123,26 @@ if __name__ == "__main__":
             for i in subnets['items']:
                 GCPActions.remove_subnet(i['name'], args.region)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing s3 buckets")
+    logging.info("Removing s3 buckets")
     try:
         buckets = GCPMeta.get_list_buckets(args.service_base_name)
         if 'items' in buckets:
             for i in buckets['items']:
                 GCPActions.remove_bucket(i['name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
-    print("Removing SSN VPC")
+    logging.info("Removing SSN VPC")
     if args.pre_defined_vpc != 'True':
         try:
             GCPActions.remove_vpc(args.vpc_name)
         except Exception as err:
-            print('Error: {0}'.format(err))
-            print("No such VPC")
+            logging.error('Error: {0}'.format(err))
+            logging.error("No such VPC")
             sys.exit(1)
     else:
-        print('VPC is predefined, VPC will not be deleted')
+        logging.info('VPC is predefined, VPC will not be deleted')
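
ssn_terminate_gcp_resources.py repeats the same try/log/exit shape for each
resource type (clusters, instances, images, addresses, firewalls, accounts,
subnets, buckets). Purely as a hypothetical refactor sketch, the pattern
compresses to one helper:

    import sys
    from datalab.logger import logging

    def run_step(description, action):
        """Log a step, run it, and exit with an error log on failure."""
        logging.info(description)
        try:
            action()
        except Exception as err:
            logging.error('Error: {0}'.format(err))
            sys.exit(1)

    # e.g. run_step("Removing firewalls", remove_all_firewalls) where
    # remove_all_firewalls is a zero-argument callable wrapping the loop above.
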
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py
index 0f57a46..709a534 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/superset_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import requests
 import sys
@@ -35,12 +35,6 @@ import subprocess
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -107,7 +101,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -125,7 +118,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON SUPERSET INSTANCE]')
-        print('[CONFIGURE PROXY ON SUPERSET INSTANCE]')
         additional_config = {"proxy_host": edge_instance_private_ip, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGURE KEYCLOAK]')
         logging.info('[CONFIGURE KEYCLOAK]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             os.environ['keycloak_auth_server_url'])
@@ -167,7 +158,7 @@ if __name__ == "__main__":
             json_keycloak_client_id = json.loads(keycloak_get_id_client.text)
             # Check, if response is not empty
             if len(json_keycloak_client_id) != 0:
-                print('Keycloak client {} exists. Getting his required attributes.'.format(keycloak_client_id))
+                logging.info('Keycloak client {} exists. Getting its required attributes.'.format(keycloak_client_id))
                 keycloak_id_client = json_keycloak_client_id[0]['id']
                 keycloak_client_get_secret_url = ("{0}/{1}/client-secret".format(keycloak_client_create_url,
                                                                                  keycloak_id_client))
@@ -178,7 +169,7 @@ if __name__ == "__main__":
                 json_keycloak_client_secret = json.loads(keycloak_client_get_secret.text)
                 keycloak_client_secret = json_keycloak_client_secret['value']
             else:
-                print('Keycloak client does not exists. Creating new client {0}.'.format(keycloak_client_id))
+                logging.info('Keycloak client does not exist. Creating new client {0}.'.format(keycloak_client_id))
                 keycloak_client_secret = str(uuid.uuid4())
                 keycloak_client_data = {
                     "clientId": keycloak_client_id,
@@ -201,7 +192,6 @@ if __name__ == "__main__":
     # updating repositories & installing and configuring superset
     try:
         logging.info('[CONFIGURE SUPERSET NOTEBOOK INSTANCE]')
-        print('[CONFIGURE SUPERSET NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} " \
                  "--region {} --os_user {} " \
                  "--datalab_path {} --keycloak_auth_server_url {} " \
@@ -225,7 +215,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -243,7 +232,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -260,20 +248,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -282,7 +270,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -311,7 +298,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGURING PROXY FOR DOCKER]')
         logging.info('[CONFIGURING PROXY FOR DOCKER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -330,7 +316,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[STARTING SUPERSET]')
         logging.info('[STARTING SUPERSET]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -356,18 +341,17 @@ if __name__ == "__main__":
         superset_notebook_acces_url = "http://" + edge_instance_hostname + "/{}/".format(notebook_config['exploratory_name'])
         superset_ungit_acces_url = "http://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("SUPERSET URL: {}".format(superset_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print("ReverseProxyNotebook".format(superset_notebook_acces_url))
-        print("ReverseProxyUngit".format(superset_ungit_acces_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(notebook_config['key_name'],
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("SUPERSET URL: {}".format(superset_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info("ReverseProxyNotebook".format(superset_notebook_acces_url))
+        logging.info("ReverseProxyUngit".format(superset_ungit_acces_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(notebook_config['key_name'],
                                                                                            notebook_config[
                                                                                                'datalab_ssh_user'],
                                                                                            ip_address))
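
Worth keeping in mind around summary lines like the ReverseProxy ones above:
str.format() silently ignores surplus positional arguments, so a message with
no "{}" placeholder drops the value without any error. Plain Python semantics,
nothing project-specific:

    >>> "ReverseProxyNotebook".format("https://edge/demo/")   # placeholder missing
    'ReverseProxyNotebook'
    >>> "ReverseProxyNotebook: {}".format("https://edge/demo/")
    'ReverseProxyNotebook: https://edge/demo/'
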
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
index f201944..a1a990d 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -39,12 +39,6 @@ args = parser.parse_args()
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -112,7 +106,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -130,7 +123,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON TENSORFLOW-RSTUDIO INSTANCE]')
-        print('[CONFIGURE PROXY ON TENSORFLOW-RSTUDIO INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -148,7 +140,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -165,7 +156,6 @@ if __name__ == "__main__":
     # installing and configuring TensorFlow and RSTUDIO and all dependencies
     try:
         logging.info('[CONFIGURE TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
-        print('[CONFIGURE TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {}  --keyfile {} " \
                  "--region {} --rstudio_pass {} " \
                  "--rstudio_version {} --os_user {} " \
@@ -185,7 +175,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -203,7 +192,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -220,20 +208,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print(
+                    logging.info(
                         "Looks like another image creating operation for your template have been started a moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -242,7 +230,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -282,20 +269,19 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         rstudio_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("TensorBoard URL: {}".format(tensorboard_url))
-        print("TensorBoard log dir: /var/log/tensorboard")
-        print("Rstudio URL: {}".format(rstudio_ip_url))
-        print("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
-        print("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("TensorBoard URL: {}".format(tensorboard_url))
+        logging.info("TensorBoard log dir: /var/log/tensorboard")
+        logging.info("Rstudio URL: {}".format(rstudio_ip_url))
+        logging.info("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
+        logging.info("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py
index 9708b3b..dd67bfa 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/tensor_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -106,7 +100,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -123,7 +116,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON TENSOR INSTANCE]')
-        print('[CONFIGURE PROXY ON TENSOR INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO TENSOR NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO TENSOR NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -157,7 +148,7 @@ if __name__ == "__main__":
 
     #Installing GPU drivers
     try:
-        print('[INSTALLING GPU DRIVERS]')
+        logging.info('[INSTALLING GPU DRIVERS]')
         params = "--hostname {} --keyfile {} --os_user {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'])
         try:
@@ -174,7 +165,6 @@ if __name__ == "__main__":
     # installing and configuring TensorFlow and all dependencies
     try:
         logging.info('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
-        print('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --region {} --os_user {} --exploratory_name {} --edge_ip {}" \
                  .format(instance_hostname, notebook_config['ssh_key_path'],
                          os.environ['gcp_region'], notebook_config['datalab_ssh_user'],
@@ -190,7 +180,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -208,7 +197,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -225,20 +213,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -247,7 +235,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -287,18 +274,17 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         jupyter_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("TensorBoard URL: {}".format(tensorboard_url))
-        print("TensorBoard log dir: /var/log/tensorboard")
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("TensorBoard URL: {}".format(tensorboard_url))
+        logging.info("TensorBoard log dir: /var/log/tensorboard")
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
index a9fe2b4..78a96a1 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
@@ -25,7 +25,7 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -34,12 +34,6 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         GCPMeta = datalab.meta_lib.GCPMeta()
         GCPActions = datalab.actions_lib.GCPActions()
@@ -106,7 +100,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, notebook_config['ssh_key_path'], notebook_config['initial_user'],
             notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -123,7 +116,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON ZEPPELIN INSTANCE]')
-        print('[CONFIGURE PROXY ON ZEPPELIN INSTANCE]')
         additional_config = {"proxy_host": edge_instance_name, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -141,7 +133,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO ZEPPELIN NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO ZEPPELIN NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                    os.environ['gcp_region'], edge_instance_private_ip)
@@ -158,7 +149,6 @@ if __name__ == "__main__":
     # installing and configuring zeppelin and all dependencies
     try:
         logging.info('[CONFIGURE ZEPPELIN NOTEBOOK INSTANCE]')
-        print('[CONFIGURE ZEPPELIN NOTEBOOK INSTANCE]')
         additional_config = {"frontend_hostname": edge_instance_name,
                              "backend_hostname": instance_hostname,
                              "backend_port": "8080",
@@ -190,7 +180,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": os.environ['project_name'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -208,7 +197,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, notebook_config['ssh_key_path'])
@@ -225,20 +213,20 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING IMAGE]')
+            logging.info('[CREATING IMAGE]')
             primary_image_id = GCPMeta.get_image_by_name(notebook_config['expected_primary_image_name'])
             if primary_image_id == '':
-                print("Looks like it's first time we configure notebook server. Creating images.")
+                logging.info("Looks like it's first time we configure notebook server. Creating images.")
                 image_id_list = GCPActions.create_image_from_instance_disks(
                     notebook_config['expected_primary_image_name'], notebook_config['expected_secondary_image_name'],
                     notebook_config['instance_name'], notebook_config['zone'], notebook_config['image_labels'])
                 if image_id_list and image_id_list[0] != '':
-                    print("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
+                    logging.info("Image of primary disk was successfully created. It's ID is {}".format(image_id_list[0]))
                 else:
-                    print("Looks like another image creating operation for your template have been started a "
+                    logging.info("Looks like another image creating operation for your template have been started a "
                           "moment ago.")
                 if image_id_list and image_id_list[1] != '':
-                    print("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
+                    logging.info("Image of secondary disk was successfully created. It's ID is {}".format(image_id_list[1]))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             GCPActions.remove_instance(notebook_config['instance_name'], notebook_config['zone'])
@@ -247,7 +235,6 @@ if __name__ == "__main__":
             sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -284,16 +271,15 @@ if __name__ == "__main__":
             notebook_config['exploratory_name'])
         zeppelin_ungit_access_url = "https://" + edge_instance_hostname + "/{}-ungit/".format(
             notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private IP: {}".format(ip_address))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(os.environ['project_name']))
-        print("Zeppelin URL: {}".format(zeppelin_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(os.environ['project_name']))
+        logging.info("Zeppelin URL: {}".format(zeppelin_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
 
         with open("/root/result.json", 'w') as result:

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org