Posted to commits@datalab.apache.org by lf...@apache.org on 2021/10/04 09:01:04 UTC

[incubator-datalab] branch DATALAB-2409 updated: [DATALAB-2409]: replaced all prints by logging for aws

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git


The following commit(s) were added to refs/heads/DATALAB-2409 by this push:
     new f95bb56  [DATALAB-2409]: replaced all prints by logging for aws
f95bb56 is described below

commit f95bb56b2013bc7ebb1c1fbf59678d19242bc0a6
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Fri Oct 1 16:28:28 2021 +0300

    [DATALAB-2409]: replaced all prints by logging for aws
---
 .../src/general/scripts/aws/common_collect_data.py |  5 +-
 .../general/scripts/aws/common_create_bucket.py    |  9 +--
 .../scripts/aws/common_create_notebook_image.py    |  3 +-
 .../general/scripts/aws/common_create_policy.py    | 13 +++--
 .../scripts/aws/common_download_git_certfile.py    |  5 +-
 ...common_notebook_configure_dataengine-service.py | 14 +----
 .../aws/common_notebook_configure_dataengine.py    |  8 +--
 .../general/scripts/aws/common_prepare_notebook.py | 14 ++---
 .../scripts/aws/common_remove_remote_kernels.py    |  3 +-
 .../src/general/scripts/aws/common_reupload_key.py |  3 +-
 .../general/scripts/aws/common_start_notebook.py   | 16 +-----
 .../general/scripts/aws/common_stop_notebook.py    | 30 ++++------
 .../scripts/aws/common_terminate_notebook.py       | 25 +++------
 .../scripts/aws/dataengine-service_configure.py    | 32 +++++------
 .../scripts/aws/dataengine-service_create.py       | 40 ++++++-------
 .../scripts/aws/dataengine-service_install_libs.py |  7 +--
 .../scripts/aws/dataengine-service_list_libs.py    |  5 +-
 .../scripts/aws/dataengine-service_prepare.py      |  9 +--
 .../scripts/aws/dataengine-service_terminate.py    | 16 ++----
 .../general/scripts/aws/dataengine_configure.py    | 41 ++++----------
 .../src/general/scripts/aws/dataengine_prepare.py  | 17 +++---
 .../src/general/scripts/aws/dataengine_start.py    | 13 ++---
 .../src/general/scripts/aws/dataengine_stop.py     | 18 ++----
 .../general/scripts/aws/dataengine_terminate.py    | 11 ++--
 .../general/scripts/aws/deeplearning_configure.py  | 54 +++++++-----------
 .../scripts/aws/edge_associate_elastic_ip.py       | 13 ++---
 .../src/general/scripts/aws/edge_configure.py      | 59 ++++++++------------
 .../src/general/scripts/aws/edge_start.py          | 26 +++------
 .../src/general/scripts/aws/edge_status.py         | 12 +---
 .../src/general/scripts/aws/edge_stop.py           | 14 +----
 .../src/general/scripts/aws/jupyter_configure.py   | 62 ++++++++-------------
 .../jupyter_dataengine-service_create_configs.py   |  1 +
 .../jupyter_install_dataengine-service_kernels.py  |  7 ++-
 .../general/scripts/aws/jupyterlab_configure.py    | 60 ++++++++------------
 .../src/general/scripts/aws/odahu_deploy.py        | 19 ++-----
 .../src/general/scripts/aws/odahu_prepare.py       | 26 +++------
 .../src/general/scripts/aws/project_prepare.py     | 48 +++++-----------
 .../src/general/scripts/aws/project_terminate.py   | 36 ++++++------
 .../src/general/scripts/aws/rstudio_configure.py   | 54 ++++++++----------
 .../rstudio_dataengine-service_create_configs.py   |  5 +-
 .../src/general/scripts/aws/ssn_terminate.py       | 18 ++----
 .../scripts/aws/ssn_terminate_aws_resources.py     |  1 -
 .../scripts/aws/tensor-rstudio_configure.py        | 65 +++++++++-------------
 .../src/general/scripts/aws/tensor_configure.py    | 59 ++++++++------------
 .../src/general/scripts/aws/zeppelin_configure.py  | 59 ++++++++------------
 45 files changed, 396 insertions(+), 659 deletions(-)
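
Every script in this change swaps the per-script stdlib setup (an "import logging"
plus a logging.basicConfig(...) block) for a single shared import,
"from datalab.logger import logging". The datalab.logger module itself is not part
of this diff, so the following is only a sketch of what it might contain, assuming
it re-exports the stdlib module preconfigured with the same format, level and
log-file scheme as the removed basicConfig blocks below:

    # datalab/logger.py -- hypothetical sketch, not the module shipped with DataLab.
    # It only needs to expose a name called "logging", so call sites keep the
    # stdlib API after "from datalab.logger import logging".
    import logging
    import os

    # Same format, level and path scheme as the per-script basicConfig blocks
    # this commit removes; assumes the /logs/<resource> directory exists, as it
    # does on DataLab provisioning hosts.
    _filename = "{}_{}_{}.log".format(os.environ.get('conf_resource', 'none'),
                                      os.environ.get('project_name', 'none'),
                                      os.environ.get('request_id', 'none'))
    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                        level=logging.DEBUG,
                        filename="/logs/{}/{}".format(os.environ.get('conf_resource', 'none'),
                                                      _filename))

Because the re-exported object behaves like the stdlib module, the straight
print(...) -> logging.info(...) substitutions below keep working unchanged.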

diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_collect_data.py b/infrastructure-provisioning/src/general/scripts/aws/common_collect_data.py
index 14d2bde..a81e373 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_collect_data.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_collect_data.py
@@ -29,6 +29,7 @@ import traceback
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -43,12 +44,12 @@ if __name__ == "__main__":
             data_instances = get_list_instance_statuses(data.get('host'))
             statuses['host'] = data_instances
         except:
-            print("Hosts JSON wasn't been provided")
+            logging.info("Hosts JSON wasn't been provided")
         try:
             data_clusters = get_list_cluster_statuses(data.get('cluster'))
             statuses['cluster'] = data_clusters
         except:
-            print("Clusters JSON wasn't been provided")
+            logging.info("Clusters JSON wasn't provided")
         with open('/root/result.json', 'w') as outfile:
             json.dump(statuses, outfile)
     except Exception as err:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_bucket.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_bucket.py
index bf98c4d..b34a83a 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_bucket.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_bucket.py
@@ -25,6 +25,7 @@ import argparse
 import sys
 from datalab.actions_lib import create_s3_bucket
 from datalab.meta_lib import get_bucket_by_name
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--bucket_name', type=str, default='')
@@ -38,13 +39,13 @@ if __name__ == "__main__":
         try:
             bucket = get_bucket_by_name(args.bucket_name)
             if bucket == '':
-                print("Creating bucket {0} with tags {1}.".format(args.bucket_name, args.bucket_tags))
+                logging.info("Creating bucket {0} with tags {1}.".format(args.bucket_name, args.bucket_tags))
                 bucket = create_s3_bucket(args.bucket_name, args.bucket_tags, args.region, args.bucket_name_tag)
             else:
-                print("REQUESTED BUCKET ALREADY EXISTS")
-            print("BUCKET_NAME {}".format(bucket))
+                logging.info("REQUESTED BUCKET ALREADY EXISTS")
+            logging.info("BUCKET_NAME {}".format(bucket))
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
     else:
         parser.print_help()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_notebook_image.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_notebook_image.py
index a1ac88b..3c08b5e 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_notebook_image.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_notebook_image.py
@@ -27,6 +27,7 @@ import datalab.meta_lib
 import json
 import os
 import sys
+from datalab.logger import logging
 
 if __name__ == "__main__":
     try:
@@ -66,7 +67,7 @@ if __name__ == "__main__":
                                                                       instance_name=image_conf['instance_name'],
                                                                       image_name=image_conf['full_image_name'],
                                                                       tags=json.dumps(image_conf['tags']))
-            print("Image was successfully created. It's name is {}".format(image_conf['full_image_name']))
+            logging.info("Image was successfully created. It's name is {}".format(image_conf['full_image_name']))
 
             with open("/root/result.json", 'w') as result:
                 res = {"notebook_image_name": image_conf['image_name'],
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py b/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py
index 0fe2fdc..76bd1c1 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_create_policy.py
@@ -27,6 +27,7 @@ import botocore
 import sys
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--bucket_name', type=str, default='')
@@ -51,7 +52,7 @@ if __name__ == "__main__":
             if args.region == 'cn-north-1':
                 policy = policy.replace('aws', 'aws-cn')
         except OSError:
-            print("Failed to open policy template")
+            logging.error("Failed to open policy template")
             sys.exit(1)
 
         list_policies_arn = []
@@ -73,7 +74,7 @@ if __name__ == "__main__":
                 list_policies_arn.append(response.get('Policy').get('Arn'))
             except botocore.exceptions.ClientError as cle:
                 if cle.response['Error']['Code'] == 'EntityAlreadyExists':
-                    print("Policy {}-{}-{}-strict_to_S3-Policy already exists. Reusing it.".
+                    logging.info("Policy {}-{}-{}-strict_to_S3-Policy already exists. Reusing it.".
                           format(args.service_base_name, args.username, args.endpoint_name))
                     list = iam.list_policies().get('Policies')
                     for i in list:
@@ -85,16 +86,16 @@ if __name__ == "__main__":
             try:
                 for arn in list_policies_arn:
                     iam.attach_role_policy(RoleName=args.edge_role_name, PolicyArn=arn)
-                    print('POLICY "{0}" has been attached to role "{1}"'.format(arn, args.edge_role_name))
+                    logging.info('POLICY "{0}" has been attached to role "{1}"'.format(arn, args.edge_role_name))
                     time.sleep(5)
                     iam.attach_role_policy(RoleName=args.notebook_role_name, PolicyArn=arn)
-                    print('POLICY "{0}" has been attached to role "{1}"'.format(arn, args.notebook_role_name))
+                    logging.info('POLICY "{0}" has been attached to role "{1}"'.format(arn, args.notebook_role_name))
                     time.sleep(5)
             except botocore.exceptions.ClientError as e:
-                print(e.response['Error']['Message'])
+                logging.error(e.response['Error']['Message'])
                 sys.exit(1)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
     else:
         parser.print_help()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
index 535ac06..bc54b45 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
@@ -26,6 +26,7 @@ import os
 from datalab.actions_lib import *
 from fabric import *
 from datalab.fab import replace_multi_symbols
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--keyfile', type=str, default='')
@@ -48,8 +49,8 @@ if __name__ == "__main__":
     if datalab.actions_lib.get_gitlab_cert(bucket_name, gitlab_certfile):
         conn.put(gitlab_certfile, gitlab_certfile)
         conn.sudo('chown root:root {}'.format(gitlab_certfile))
-        print('{} has been downloaded'.format(gitlab_certfile))
+        logging.info('{} has been downloaded'.format(gitlab_certfile))
     else:
-        print('There is no {} to download'.format(gitlab_certfile))
+        logging.info('There is no {} to download'.format(gitlab_certfile))
 
     conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py
index b265226..c67b30d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine-service.py
@@ -25,12 +25,12 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 
 def clear_resources():
@@ -42,16 +42,10 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         # generating variables dictionary
         datalab.actions_lib.create_aws_config_files()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         notebook_config = dict()
         notebook_config['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'][:20], '-', True)
@@ -84,7 +78,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         params = "--bucket {} --cluster_name {} --emr_version {} --keyfile {} --notebook_ip {} --region {} " \
                  "--emr_excluded_spark_properties {} --project_name {} --os_user {}  --edge_hostname {} " \
                  "--proxy_port {} --scala_version {} --application {}" \
@@ -108,7 +101,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
-        print('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
         params = "--hostname {0} " \
                  "--keyfile {1} " \
                  "--os_user {2} " \
@@ -133,7 +125,7 @@ if __name__ == "__main__":
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Tag_name": notebook_config['tag_name'],
                    "Action": "Configure notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
index adf6e07..f9e133c 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
@@ -25,12 +25,12 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 
 def clear_resources():
@@ -51,7 +51,7 @@ if __name__ == "__main__":
     try:
         # generating variables dictionary
         datalab.actions_lib.create_aws_config_files()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         notebook_config = dict()
         if 'exploratory_name' in os.environ:
             notebook_config['exploratory_name'] = os.environ['exploratory_name']
@@ -94,7 +94,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         params = "--cluster_name {0} --spark_version {1} --hadoop_version {2} --os_user {3} --spark_master {4}" \
                  " --keyfile {5} --notebook_ip {6} --spark_master_ip {7}".\
             format(notebook_config['cluster_name'], os.environ['notebook_spark_version'],
@@ -113,7 +112,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
-        print('[UPDATING SPARK CONFIGURATION FILES ON NOTEBOOK]')
         params = "--hostname {0} " \
                  "--keyfile {1} " \
                  "--os_user {2} " \
@@ -136,7 +134,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"notebook_name": notebook_config['notebook_name'],
                    "Action": "Configure notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
index ad7f59d..6d40ffe 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
@@ -26,12 +26,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--uuid', type=str, default='')
@@ -59,7 +59,6 @@ if __name__ == "__main__":
                                                            notebook_config['edge_name'])
         if edge_status != 'running':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             notebook_config['ssn_hostname'] = datalab.meta_lib.get_instance_hostname(
                 '{}-tag'.format(notebook_config['service_base_name']),
                 '{}-ssn'.format(notebook_config['service_base_name']))
@@ -68,7 +67,7 @@ if __name__ == "__main__":
                                             notebook_config['ssn_hostname'])
             datalab.fab.append_result("Edge node is unavailable")
             sys.exit(1)
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         try:
             notebook_config['exploratory_name'] = os.environ['exploratory_name']
         except:
@@ -100,7 +99,7 @@ if __name__ == "__main__":
             notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_name'],
             os.environ['application'], os.environ['notebook_image_name']) if (x != 'None' and x != '')
             else notebook_config['expected_image_name'])(str(os.environ.get('notebook_image_name')))
-        print('Searching pre-configured images')
+        logging.info('Searching pre-configured images')
         notebook_config['ami_id'] = datalab.meta_lib.get_ami_id(os.environ['aws_{}_image_name'.format(
             os.environ['conf_os_family'])])
         image_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['notebook_image_name'], 'available')
@@ -108,10 +107,10 @@ if __name__ == "__main__":
             image_id = datalab.meta_lib.get_ami_id(os.environ['notebook_image_name'])
         if image_id != '':
             notebook_config['ami_id'] = image_id
-            print('Pre-configured image found. Using: {}'.format(notebook_config['ami_id']))
+            logging.info('Pre-configured image found. Using: {}'.format(notebook_config['ami_id']))
         else:
             os.environ['notebook_image_name'] = os.environ['aws_{}_image_name'.format(os.environ['conf_os_family'])]
-            print('No pre-configured image found. Using default one: {}'.format(notebook_config['ami_id']))
+            logging.info('No pre-configured image found. Using default one: {}'.format(notebook_config['ami_id']))
 
         tag = {"Key": notebook_config['tag_name'],
                "Value": "{}-{}-{}-subnet".format(notebook_config['service_base_name'], notebook_config['project_name'],
@@ -130,7 +129,7 @@ if __name__ == "__main__":
             os.environ['conf_additional_tags'] = 'project_tag:{0};endpoint_tag:{1}'.format(
                 notebook_config['project_name'], notebook_config['endpoint_name'])
 
-        print('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
+        logging.info('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
     except Exception as err:
         datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
@@ -138,7 +137,6 @@ if __name__ == "__main__":
     # launching instance for notebook server
     try:
         logging.info('[CREATE NOTEBOOK INSTANCE]')
-        print('[CREATE NOTEBOOK INSTANCE]')
         params = "--node_name {} --ami_id {} --instance_type {} --key_name {} --security_group_ids {} --subnet_id {} " \
                  "--iam_profile {} --infra_tag_name {} --infra_tag_value {} --instance_class {} " \
                  "--instance_disk_size {} --primary_disk_size {}" .format(
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py
index e1af215..5104e84 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py
@@ -26,6 +26,7 @@ import sys
 from datalab.actions_lib import remove_dataengine_kernels, remove_kernels
 from datalab.fab import init_datalab_connection, find_cluster_kernels
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -51,5 +52,5 @@ if __name__ == "__main__":
         conn.close()
         sys.exit(0)
     except Exception as err:
-        print('Failed to remove cluster kernels.', str(err))
+        logging.error('Failed to remove cluster kernels. {}'.format(str(err)))
         sys.exit(1)
\ No newline at end of file
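
One behavioral difference to keep in mind for this substitution: print() joins all
of its positional arguments, while stdlib-style logging treats arguments after the
message as lazy %-format parameters. Assuming datalab.logger re-exports the stdlib
module, a call like logging.error('Failed to remove cluster kernels.', str(err))
would not append the error text; with no %s placeholder in the message, the handler
reports a formatting error instead. A minimal illustration:

    import logging

    logging.basicConfig(level=logging.INFO)
    err = RuntimeError("kernel removal failed")

    # print('Failed to remove cluster kernels.', err) printed both values;
    # logging needs either a %-placeholder (lazy) or explicit formatting (eager):
    logging.error('Failed to remove cluster kernels. %s', err)
    logging.error('Failed to remove cluster kernels. {}'.format(err))

The .format(...) form matches the eager-formatting style these scripts already use
everywhere else.
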
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py b/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py
index d80f53a..c263bf2 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_reupload_key.py
@@ -27,6 +27,7 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -48,5 +49,5 @@ if __name__ == "__main__":
         try:
             subprocess.run("~/scripts/{}.py {}".format('install_user_key', params), shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
index 22c0d5e..4bc9121 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
@@ -25,23 +25,17 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     datalab.actions_lib.create_aws_config_files()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -49,10 +43,9 @@ if __name__ == "__main__":
 
     try:
         logging.info('[START NOTEBOOK]')
-        print('[START NOTEBOOK]')
         params = "--tag_name {} --nb_tag_value {}".format(notebook_config['tag_name'], notebook_config['notebook_name'])
         try:
-            print("Starting notebook")
+            logging.info("Starting notebook")
             datalab.actions_lib.start_ec2(notebook_config['tag_name'], notebook_config['notebook_name'])
         except Exception as err:
             traceback.print_exc()
@@ -63,7 +56,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[SETUP USER GIT CREDENTIALS]')
-        print('[SETUP USER GIT CREDENTIALS]')
         notebook_config['notebook_ip'] = datalab.meta_lib.get_instance_ip_address(
             notebook_config['tag_name'], notebook_config['notebook_name']).get('Private')
         notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
@@ -80,7 +72,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[UPDATE LAST ACTIVITY TIME]')
-        print('[UPDATE LAST ACTIVITY TIME]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
         try:
@@ -96,7 +87,6 @@ if __name__ == "__main__":
         ip_address = datalab.meta_lib.get_instance_ip_address(notebook_config['tag_name'],
                                                               notebook_config['notebook_name']).get('Private')
         dns_name = datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'], notebook_config['notebook_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
         print("Instance name: {}".format(notebook_config['notebook_name']))
         print("Private DNS: {}".format(dns_name))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_stop_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_stop_notebook.py
index 9ddcdf4..20bc02a 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_stop_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_stop_notebook.py
@@ -26,14 +26,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 from fabric import *
+from datalab.logger import logging
 
 
 def stop_notebook(nb_tag_value, bucket_name, tag_name, ssh_user, key_path):
-    print('Terminating EMR cluster and cleaning EMR config from S3 bucket')
+    logging.info('Terminating EMR cluster and cleaning EMR config from S3 bucket')
     try:
         clusters_list = datalab.meta_lib.get_emr_list(nb_tag_value, 'Value')
         if clusters_list:
@@ -48,18 +48,18 @@ def stop_notebook(nb_tag_value, bucket_name, tag_name, ssh_user, key_path):
                     if tag.get('Key') == 'ComputationalName':
                         computational_name = tag.get('Value')
                 datalab.actions_lib.s3_cleanup(bucket_name, emr_name, os.environ['project_name'])
-                print("The bucket {} has been cleaned successfully".format(bucket_name))
+                logging.info("The bucket {} has been cleaned successfully".format(bucket_name))
                 datalab.actions_lib.terminate_emr(cluster_id)
-                print("The EMR cluster {} has been terminated successfully".format(emr_name))
+                logging.info("The EMR cluster {} has been terminated successfully".format(emr_name))
                 datalab.actions_lib.remove_kernels(emr_name, tag_name, nb_tag_value, ssh_user, key_path, emr_version,
                                                    computational_name)
-                print("{} kernels have been removed from notebook successfully".format(emr_name))
+                logging.info("{} kernels have been removed from notebook successfully".format(emr_name))
         else:
-            print("There are no EMR clusters to terminate.")
+            logging.info("There are no EMR clusters to terminate.")
     except:
         sys.exit(1)
 
-    print("Stopping data engine cluster")
+    logging.info("Stopping data engine cluster")
     try:
         cluster_list = []
         master_ids = []
@@ -76,7 +76,7 @@ def stop_notebook(nb_tag_value, bucket_name, tag_name, ssh_user, key_path):
     except:
         sys.exit(1)
 
-    print("Stopping notebook")
+    logging.info("Stopping notebook")
     try:
         datalab.actions_lib.stop_ec2(tag_name, nb_tag_value)
     except:
@@ -84,16 +84,9 @@ def stop_notebook(nb_tag_value, bucket_name, tag_name, ssh_user, key_path):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     datalab.actions_lib.create_aws_config_files()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -107,12 +100,11 @@ if __name__ == "__main__":
     notebook_config['key_path'] = os.environ['conf_key_dir'] + '/' + os.environ['conf_key_name'] + '.pem'
 
     logging.info('[STOP NOTEBOOK]')
-    print('[STOP NOTEBOOK]')
     try:
         stop_notebook(notebook_config['notebook_name'], notebook_config['bucket_name'], notebook_config['tag_name'],
                       os.environ['conf_os_user'], notebook_config['key_path'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         datalab.fab.append_result("Failed to stop notebook.", str(err))
         sys.exit(1)
 
@@ -123,7 +115,7 @@ if __name__ == "__main__":
                    "Tag_name": notebook_config['tag_name'],
                    "user_own_bucket_name": notebook_config['bucket_name'],
                    "Action": "Stop notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook.py
index 5a90d1b..a7e92f1 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_terminate_notebook.py
@@ -26,14 +26,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
+from datalab.logger import logging
 
 
 def terminate_nb(nb_tag_value, bucket_name, tag_name):
-    print('Terminating EMR cluster and cleaning EMR config from S3 bucket')
+    logging.info('Terminating EMR cluster and cleaning EMR config from S3 bucket')
     try:
         clusters_list = datalab.meta_lib.get_emr_list(nb_tag_value, 'Value')
         if clusters_list:
@@ -42,24 +42,24 @@ def terminate_nb(nb_tag_value, bucket_name, tag_name):
                 cluster = client.describe_cluster(ClusterId=cluster_id)
                 cluster = cluster.get("Cluster")
                 emr_name = cluster.get('Name')
-                print('Cleaning bucket from configs for cluster {}'.format(emr_name))
+                logging.info('Cleaning bucket from configs for cluster {}'.format(emr_name))
                 datalab.actions_lib.s3_cleanup(bucket_name, emr_name, os.environ['project_name'])
                 print("The bucket {} has been cleaned successfully".format(bucket_name))
                 print('Terminating cluster {}'.format(emr_name))
                 datalab.actions_lib.terminate_emr(cluster_id)
-                print("The EMR cluster {} has been terminated successfully".format(emr_name))
+                logging.info("The EMR cluster {} has been terminated successfully".format(emr_name))
         else:
-            print("There are no EMR clusters to terminate.")
+            logging.info("There are no EMR clusters to terminate.")
     except:
         sys.exit(1)
 
-    print("Terminating data engine cluster")
+    logging.info("Terminating data engine cluster")
     try:
         datalab.actions_lib.remove_ec2('dataengine_notebook_name', nb_tag_value)
     except:
         sys.exit(1)
 
-    print("Terminating notebook")
+    logging.info("Terminating notebook")
     try:
         datalab.actions_lib.remove_ec2(tag_name, nb_tag_value)
     except:
@@ -67,15 +67,9 @@ def terminate_nb(nb_tag_value, bucket_name, tag_name):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     datalab.actions_lib.create_aws_config_files()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     notebook_config = dict()
     notebook_config['service_base_name'] = (os.environ['conf_service_base_name'])
     notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -89,7 +83,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE NOTEBOOK]')
-        print('[TERMINATE NOTEBOOK]')
         try:
             terminate_nb(notebook_config['notebook_name'], notebook_config['bucket_name'], notebook_config['tag_name'])
         except Exception as err:
@@ -105,7 +98,7 @@ if __name__ == "__main__":
                    "Tag_name": notebook_config['tag_name'],
                    "user_own_bucket_name": notebook_config['bucket_name'],
                    "Action": "Terminate notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
index 2cbec9b..0aebdf5 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
@@ -26,13 +26,13 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
 import multiprocessing
 import os
 import sys
 import traceback
 from datalab.common_lib import manage_pkg
 from fabric import *
+from datalab.logger import logging
 import subprocess
 
 parser = argparse.ArgumentParser()
@@ -44,7 +44,6 @@ def configure_dataengine_service(instance, emr_conf):
     emr_conf['instance_ip'] = instance.get('PrivateIpAddress')
     try:
         logging.info('[CREATING DATALAB SSH USER ON DATAENGINE SERVICE]')
-        print('[CREATING DATALAB SSH USER ON DATAENGINE SERVICE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (emr_conf['instance_ip'], emr_conf['key_path'], emr_conf['initial_user'],
              emr_conf['os_user'], emr_conf['sudo_group'])
@@ -61,7 +60,6 @@ def configure_dataengine_service(instance, emr_conf):
     # configuring proxy on Data Engine service
     try:
         logging.info('[CONFIGURE PROXY ON DATAENGINE SERVICE]')
-        print('[CONFIGURE PROXY ON DATAENGINE SERVICE]')
         additional_config = {"proxy_host": emr_conf['edge_instance_hostname'], "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(emr_conf['instance_ip'], emr_conf['cluster_name'], emr_conf['key_path'],
@@ -78,7 +76,6 @@ def configure_dataengine_service(instance, emr_conf):
 
     try:
         logging.info('[CONFIGURE DATAENGINE SERVICE]')
-        print('[CONFIGURE DATAENGINE SERVICE]')
         try:
             datalab.fab.configure_data_engine_service_pip(emr_conf['instance_ip'], emr_conf['os_user'],
                                                           emr_conf['key_path'], True)
@@ -97,7 +94,6 @@ def configure_dataengine_service(instance, emr_conf):
 
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         cluster_master_instances = emr_conf['cluster_master_instances']
         slaves = []
@@ -138,7 +134,6 @@ def configure_dataengine_service(instance, emr_conf):
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": emr_conf['user_keyname'], "user_keydir": os.environ['conf_key_dir']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
@@ -164,7 +159,7 @@ if __name__ == "__main__":
 
     try:
         datalab.actions_lib.create_aws_config_files()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         emr_conf = dict()
         if 'exploratory_name' in os.environ:
             emr_conf['exploratory_name'] = os.environ['exploratory_name']
@@ -257,18 +252,17 @@ if __name__ == "__main__":
                                                           emr_conf['exploratory_name'],
                                                           emr_conf['computational_name'])
         logging.info('[SUMMARY]')
-        print('[SUMMARY]')
-        print("Service base name: {}".format(emr_conf['service_base_name']))
-        print("Cluster name: {}".format(emr_conf['cluster_name']))
-        print("Cluster id: {}".format(datalab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name'])))
-        print("Key name: {}".format(emr_conf['key_name']))
-        print("Region: {}".format(emr_conf['region']))
-        print("EMR version: {}".format(emr_conf['release_label']))
-        print("EMR master node shape: {}".format(emr_conf['master_instance_type']))
-        print("EMR slave node shape: {}".format(emr_conf['slave_instance_type']))
-        print("Instance count: {}".format(emr_conf['instance_count']))
-        print("Notebook IP address: {}".format(emr_conf['notebook_ip']))
-        print("Bucket name: {}".format(emr_conf['bucket_name']))
+        logging.info("Service base name: {}".format(emr_conf['service_base_name']))
+        logging.info("Cluster name: {}".format(emr_conf['cluster_name']))
+        logging.info("Cluster id: {}".format(datalab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name'])))
+        logging.info("Key name: {}".format(emr_conf['key_name']))
+        logging.info("Region: {}".format(emr_conf['region']))
+        logging.info("EMR version: {}".format(emr_conf['release_label']))
+        logging.info("EMR master node shape: {}".format(emr_conf['master_instance_type']))
+        logging.info("EMR slave node shape: {}".format(emr_conf['slave_instance_type']))
+        logging.info("Instance count: {}".format(emr_conf['instance_count']))
+        logging.info("Notebook IP address: {}".format(emr_conf['notebook_ip']))
+        logging.info("Bucket name: {}".format(emr_conf['bucket_name']))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": emr_conf['cluster_name'],
                    "instance_id": datalab.meta_lib.get_emr_id_by_name(emr_conf['cluster_name']),
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py
index e304105..ed31bf5 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py
@@ -24,7 +24,6 @@
 import argparse
 import ast
 import boto3
-import logging
 import re
 import sys
 import time
@@ -32,6 +31,7 @@ import traceback
 from botocore.client import Config as botoConfig
 from datalab.actions_lib import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -154,7 +154,7 @@ def get_object_count(bucket, prefix):
                     file_list.append(file.get('Key'))
             count = len(file_list)
         except:
-            print("{} still not exist. Waiting...".format(prefix))
+            logging.info("{} still not exist. Waiting...".format(prefix))
             count = 0
         return count
     except Exception as err:
@@ -282,12 +282,12 @@ def parse_steps(step_string):
 def action_validate(id):
     state = get_emr_info(id, 'Status')['State']
     if state in ("TERMINATING", "TERMINATED", "TERMINATED_WITH_ERRORS"):
-        print("Cluster is alredy stopped. Bye")
+        logging.info("Cluster is alredy stopped. Bye")
         return ["False", state]
     elif state in ("RUNNING", "WAITING"):
         return ["True", state]
     else:
-        print("Cluster is still being built.")
+        logging.info("Cluster is still being built.")
         return ["True", state]
 
 
@@ -322,17 +322,17 @@ def build_emr_cluster(args):
             steps = parse_steps(cp_config)
     
         if args.dry_run:
-            print("Build parameters are:")
-            print(args)
-            print("\n")
-            print("Applications to be installed:")
-            print(names)
-            print("\n")
-            print("Cluster tags:")
-            print(tags)
-            print("\n")
-            print("Cluster Jobs:")
-            print(steps)
+            logging.info("Build parameters are:")
+            logging.info(args)
+            logging.info("\n")
+            logging.info("Applications to be installed:")
+            logging.info(names)
+            logging.info("\n")
+            logging.info("Cluster tags:")
+            logging.info(tags)
+            logging.info("\n")
+            logging.info("Cluster Jobs:")
+            logging.info(steps)
     
         if not args.dry_run:
             socket = boto3.client('emr')
@@ -397,7 +397,7 @@ def build_emr_cluster(args):
                     JobFlowRole=args.ec2_role,
                     ServiceRole=args.service_role,
                     Configurations=ast.literal_eval(args.configurations))
-            print("Cluster_id {}".format(result.get('JobFlowId')))
+            logging.info("Cluster_id {}".format(result.get('JobFlowId')))
             return result.get('JobFlowId')
     except Exception as err:
         logging.error("Failed to build EMR cluster: " +
@@ -420,7 +420,7 @@ if __name__ == "__main__":
         build_emr_cluster(args)
     else:
         if not get_role_by_name(args.service_role):
-            print("There is no default EMR service role. Creating...")
+            logging.info("There is no default EMR service role. Creating...")
             create_iam_role(args.service_role,
                             args.service_role,
                             args.region, args.permissions_boundary_arn,
@@ -428,7 +428,7 @@ if __name__ == "__main__":
             attach_policy(args.service_role,
                           policy_arn='arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole')
         if not get_role_by_name(args.ec2_role):
-            print("There is no default EMR EC2 role. Creating...")
+            logging.info("There is no default EMR EC2 role. Creating...")
             create_iam_role(args.ec2_role,
                             args.ec2_role,
                             args.region, args.permissions_boundary_arn)
@@ -449,9 +449,9 @@ if __name__ == "__main__":
             spot_instances_status = get_spot_instances_status(cluster_id)
             bool_, code, message = spot_instances_status
             if bool_:
-                print("Spot instances status: {}, Message:{}".format(code, message))
+                logging.info("Spot instances status: {}, Message:{}".format(code, message))
             else:
-                print("SPOT REQUEST WASN'T FULFILLED, BECAUSE: "
+                logging.info("SPOT REQUEST WASN'T FULFILLED, BECAUSE: "
                       "STATUS CODE IS {}, MESSAGE IS {}".format(code, message))
                 append_result("Error with Spot request. Status code: {}, Message: {}".format(code, message))
                 sys.exit(1)
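
A side effect of the one-for-one substitution in build_emr_cluster's dry-run branch
above: every logging.info() call emits its own timestamped record, so the former
print("\n") separators now log prefixed, nearly empty lines. If that output gets
noisy, the whole report can go out as one multi-line record; a sketch with
placeholder values:

    import logging

    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                        level=logging.INFO)

    # Placeholder dry-run values, for illustration only.
    args, names, tags, steps = '<args>', '<applications>', '<tags>', '<steps>'

    logging.info("Build parameters are:\n%s\n\n"
                 "Applications to be installed:\n%s\n\n"
                 "Cluster tags:\n%s\n\n"
                 "Cluster Jobs:\n%s", args, names, tags, steps)
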
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py
index d687c8a..2e6f8db 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_install_libs.py
@@ -21,7 +21,6 @@
 #
 # ******************************************************************************
 
-import logging
 import multiprocessing
 import os
 import sys
@@ -31,6 +30,7 @@ from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
 from fabric import *
+from datalab.logger import logging
 
 
 def install_libs(instance, data_engine):
@@ -58,7 +58,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE-SERVICE]')
-        print('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE-SERVICE]')
         data_engine = dict()
         try:
             data_engine['os_user'] = 'ec2-user'
@@ -68,7 +67,7 @@ if __name__ == "__main__":
             data_engine['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
             data_engine['libs'] = os.environ['libs']
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         try:
@@ -86,6 +85,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to install additional libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py
index 967fc6f..1b232ff 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_list_libs.py
@@ -21,7 +21,6 @@
 #
 # ******************************************************************************
 
-import logging
 import os
 import sys
 import traceback
@@ -29,6 +28,7 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 
 if __name__ == "__main__":
@@ -43,7 +43,6 @@ if __name__ == "__main__":
     try:
         create_aws_config_files()
         logging.info('[GETTING ALL AVAILABLE PACKAGES]')
-        print('[GETTING ALL AVAILABLE PACKAGES]')
         data_engine = dict()
         try:
             data_engine['os_user'] = 'ec2-user'
@@ -65,6 +64,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to get available libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py
index bee05a4..c5a2d0d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_prepare.py
@@ -26,13 +26,13 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import time
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--uuid', type=str, default='')
@@ -63,7 +63,6 @@ if __name__ == "__main__":
                                       emr_conf['endpoint_name']))
         if edge_status != 'running':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             ssn_hostname = datalab.meta_lib.get_instance_hostname(
                 emr_conf['service_base_name'] + '-tag',
                 emr_conf['service_base_name'] + '-ssn')
@@ -72,7 +71,7 @@ if __name__ == "__main__":
                                             os.environ['conf_os_user'], ssn_hostname)
             datalab.fab.append_result("Edge node is unavailable")
             sys.exit(1)
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         if 'computational_name' in os.environ:
             emr_conf['computational_name'] = os.environ['computational_name']
         else:
@@ -151,7 +150,7 @@ if __name__ == "__main__":
         datalab.fab.append_result("Failed to generate variables dictionary", str(err))
         sys.exit(1)
 
-    print("Will create exploratory environment with edge node as access point as following: {}".format(
+    logging.info("Will create exploratory environment with edge node as access point as following: {}".format(
         json.dumps(emr_conf, sort_keys=True, indent=4, separators=(',', ': '))))
     logging.info(json.dumps(emr_conf))
 
@@ -172,7 +171,6 @@ if __name__ == "__main__":
         json.dump(data, f)
 
     logging.info('[CREATING ADDITIONAL SECURITY GROUPS FOR EMR]')
-    print("[CREATING ADDITIONAL SECURITY GROUPS FOR EMR]")
     try:
         group_id = datalab.meta_lib.check_security_group(emr_conf['edge_security_group_name'])
         cluster_sg_ingress = [
@@ -260,7 +258,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[Creating EMR Cluster]')
-        print('[Creating EMR Cluster]')
         params = "--name {0} " \
                  "--applications '{1}' " \
                  "--master_instance_type {2} " \
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_terminate.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_terminate.py
index db9077c..193544c 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_terminate.py
@@ -26,14 +26,14 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
+from datalab.logger import logging
 
 
 def terminate_emr_cluster(emr_name, bucket_name, tag_name, nb_tag_value, ssh_user, key_path):
-    print('Terminating EMR cluster and cleaning EMR config from S3 bucket')
+    logging.info('Terminating EMR cluster and cleaning EMR config from S3 bucket')
     try:
         clusters_list = datalab.meta_lib.get_emr_list(emr_name, 'Value')
         if clusters_list:
@@ -55,22 +55,15 @@ def terminate_emr_cluster(emr_name, bucket_name, tag_name, nb_tag_value, ssh_use
                 datalab.actions_lib.remove_kernels(emr_name, tag_name, nb_tag_value, ssh_user, key_path,
                                                    emr_version, computational_name)
         else:
-            print("There are no EMR clusters to terminate.")
+            logging.info("There are no EMR clusters to terminate.")
     except:
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     datalab.actions_lib.create_aws_config_files()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     emr_conf = dict()
     emr_conf['service_base_name'] = (os.environ['conf_service_base_name'])
     emr_conf['emr_name'] = os.environ['emr_cluster_name']
@@ -84,7 +77,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE EMR CLUSTER]')
-        print('[TERMINATE EMR CLUSTER]')
         try:
             terminate_emr_cluster(emr_conf['emr_name'], emr_conf['bucket_name'], emr_conf['tag_name'],
                                   emr_conf['notebook_name'], os.environ['conf_os_user'], emr_conf['key_path'])
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
index 7995824..47fd12a 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
@@ -25,12 +25,12 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
 import multiprocessing
 import os
 import sys
 import traceback
 from fabric import *
+from datalab.logger import logging
 import subprocess
 
 def configure_slave(slave_number, data_engine):
@@ -38,7 +38,6 @@ def configure_slave(slave_number, data_engine):
     slave_hostname = datalab.meta_lib.get_instance_private_ip_address(data_engine['tag_name'], slave_name)
     try:
         logging.info('[CREATING DATALAB SSH USER ON SLAVE NODE]')
-        print('[CREATING DATALAB SSH USER ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             master_node_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], data_engine['key_name']),
             data_engine['initial_user'], data_engine['datalab_ssh_user'], data_engine['sudo_group'])
@@ -55,7 +54,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CLEANING INSTANCE FOR SLAVE NODE]')
-        print('[CLEANING INSTANCE FOR SLAVE NODE]')
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
             .format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
@@ -70,7 +68,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CONFIGURE PROXY ON SLAVE NODE]')
-        print('[CONFIGURE PROXY ON ON SLAVE NODE]')
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(slave_hostname, slave_name, keyfile_name, json.dumps(additional_config),
@@ -87,7 +84,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON SLAVE NODE]')
-        print('[INSTALLING PREREQUISITES ON SLAVE NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(slave_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
@@ -103,7 +99,6 @@ def configure_slave(slave_number, data_engine):
 
     try:
         logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
-        print('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --r_mirror {} --master_ip {} --node_type {}". \
             format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
@@ -121,7 +116,6 @@ def configure_slave(slave_number, data_engine):
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": data_engine['user_keyname'], "user_keydir": os.environ['conf_key_dir']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
@@ -145,15 +139,8 @@ def clear_resources():
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.INFO,
-                        filename=local_log_filepath)
-
     try:
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         data_engine = dict()
         if 'exploratory_name' in os.environ:
             data_engine['exploratory_name'] = os.environ['exploratory_name']
@@ -222,7 +209,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER ON MASTER NODE]')
-        print('[CREATING DATALAB SSH USER ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             master_node_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], data_engine['key_name']),
             data_engine['initial_user'], data_engine['datalab_ssh_user'], data_engine['sudo_group'])
@@ -239,7 +225,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CLEANING INSTANCE FOR MASTER NODE]')
-        print('[CLEANING INSTANCE FOR MASTER NODE]')
         params = '--hostname {} --keyfile {} --os_user {} --application {}' \
             .format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], os.environ['application'])
         try:
@@ -254,7 +239,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE PROXY ON MASTER NODE]')
-        print('[CONFIGURE PROXY ON ON MASTER NODE]')
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(master_node_hostname, data_engine['master_node_name'], keyfile_name, json.dumps(additional_config),
@@ -271,7 +255,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[INSTALLING PREREQUISITES ON MASTER NODE]')
-        print('[INSTALLING PREREQUISITES ON MASTER NODE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(master_node_hostname, keyfile_name, data_engine['datalab_ssh_user'], data_engine['region'],
                    edge_instance_private_ip)
@@ -286,7 +269,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY on MASTER NODE]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": data_engine['user_keyname'], "user_keydir": os.environ['conf_key_dir']}
         params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
@@ -303,7 +286,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE MASTER NODE]')
-        print('[CONFIGURE MASTER NODE]')
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
                  "--scala_version {} --r_mirror {} --master_ip {} --node_type {}".\
             format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
@@ -337,7 +319,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         notebook_instance_ip = datalab.meta_lib.get_instance_private_ip_address('Name',
                                                                                 os.environ['notebook_instance_name'])
@@ -380,13 +361,12 @@ if __name__ == "__main__":
                                                              data_engine['exploratory_name'],
                                                              data_engine['computational_name'])
         logging.info('[SUMMARY]')
-        print('[SUMMARY]')
-        print("Service base name: {}".format(data_engine['service_base_name']))
-        print("Region: {}".format(data_engine['region']))
-        print("Cluster name: {}".format(data_engine['cluster_name']))
-        print("Master node shape: {}".format(data_engine['master_size']))
-        print("Slave node shape: {}".format(data_engine['slave_size']))
-        print("Instance count: {}".format(str(data_engine['instance_count'])))
+        logging.info("Service base name: {}".format(data_engine['service_base_name']))
+        logging.info("Region: {}".format(data_engine['region']))
+        logging.info("Cluster name: {}".format(data_engine['cluster_name']))
+        logging.info("Master node shape: {}".format(data_engine['master_size']))
+        logging.info("Slave node shape: {}".format(data_engine['slave_size']))
+        logging.info("Instance count: {}".format(str(data_engine['instance_count'])))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": data_engine['cluster_name'],
                    "instance_id": datalab.meta_lib.get_instance_by_name(data_engine['tag_name'],
@@ -399,7 +380,7 @@ if __name__ == "__main__":
                        #{"description": "Apache Spark Master (via tunnel)",
                         #"url": spark_master_url}
                    ]}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
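The import swap is the heart of this change: each script's private `import logging` plus `logging.basicConfig(...)` block gives way to `from datalab.logger import logging`. The datalab/logger.py module itself is not part of this diff; the following is only a minimal sketch of what such a module could look like, reusing the format string from the removed basicConfig calls (the logger name, handler choice, and level are assumptions, not the committed code):

    # datalab/logger.py -- hypothetical sketch; the real module is not shown in this commit.
    # Exposes a pre-configured logger object named "logging" so call sites can keep
    # writing logging.info(...) / logging.error(...) unchanged after the import swap.
    import logging as _logging

    _handler = _logging.StreamHandler()
    _handler.setFormatter(_logging.Formatter('%(levelname)-8s [%(asctime)s]  %(message)s'))

    logging = _logging.getLogger('datalab')
    logging.setLevel(_logging.DEBUG)
    logging.addHandler(_handler)

Centralizing the configuration this way is what lets every script below delete its local_log_filename/local_log_filepath boilerplate.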
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py
index 3919513..1ef501d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_prepare.py
@@ -25,12 +25,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 if __name__ == "__main__":
     local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
@@ -50,7 +50,6 @@ if __name__ == "__main__":
                 data_engine['service_base_name'], data_engine['project_name'], data_engine['endpoint_name']))
         if edge_status != 'running':
             logging.info('ERROR: Edge node is unavailable! Aborting...')
-            print('ERROR: Edge node is unavailable! Aborting...')
             ssn_hostname = datalab.meta_lib.get_instance_hostname(data_engine['service_base_name'] + '-tag',
                                                                   data_engine['service_base_name'] + '-ssn')
             datalab.fab.put_resource_status('edge', 'Unavailable', os.environ['ssn_datalab_path'],
@@ -58,7 +57,7 @@ if __name__ == "__main__":
                                             ssn_hostname)
             datalab.fab.append_result("Edge node is unavailable")
             sys.exit(1)
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         if 'exploratory_name' in os.environ:
             data_engine['exploratory_name'] = os.environ['exploratory_name']
         else:
@@ -116,16 +115,16 @@ if __name__ == "__main__":
                                                    data_engine['endpoint_name']) if (
                     x != 'None' and x != '')
             else data_engine['expected_image_name'])(str(os.environ.get('notebook_image_name')))
-        print('Searching pre-configured images')
+        logging.info('Searching pre-configured images')
         data_engine['ami_id'] = datalab.meta_lib.get_ami_id(os.environ['aws_{}_image_name'.format(
             os.environ['conf_os_family'])])
         image_id = datalab.meta_lib.get_ami_id_by_name(data_engine['notebook_image_name'], 'available')
         if image_id != '' and os.environ['application'] in os.environ['dataengine_image_notebooks'].split(','):
             data_engine['ami_id'] = image_id
-            print('Pre-configured image found. Using: {}'.format(data_engine['ami_id']))
+            logging.info('Pre-configured image found. Using: {}'.format(data_engine['ami_id']))
         else:
             os.environ['notebook_image_name'] = os.environ['aws_{}_image_name'.format(os.environ['conf_os_family'])]
-            print('No pre-configured image found. Using default one: {}'.format(data_engine['ami_id']))
+            logging.info('No pre-configured image found. Using default one: {}'.format(data_engine['ami_id']))
 
     except Exception as err:
         datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
@@ -141,11 +140,10 @@ if __name__ == "__main__":
     except KeyError:
         os.environ['conf_additional_tags'] = 'project_tag:{0};endpoint_tag:{1}'.format(data_engine['project_name'],
                                                                                        data_engine['endpoint_name'])
-    print('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
+    logging.info('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
 
     try:
         logging.info('[CREATE MASTER NODE]')
-        print('[CREATE MASTER NODE]')
         data_engine['cluster_nodes_tag_type'] = {"Key": "Type", "Value": "master"}
         params = "--node_name {} --ami_id {} --instance_type {} --key_name {} --security_group_ids {} " \
                  "--subnet_id {} --iam_profile {} --infra_tag_name {} --infra_tag_value {} --primary_disk_size {} " \
@@ -174,7 +172,6 @@ if __name__ == "__main__":
     try:
         for i in range(data_engine['instance_count'] - 1):
             logging.info('[CREATE SLAVE NODE {}]'.format(i + 1))
-            print('[CREATE SLAVE NODE {}]'.format(i + 1))
             slave_name = data_engine['slave_node_name'] + '{}'.format(i + 1)
             data_engine['cluster_nodes_tag_type'] = {"Key": "Type", "Value": "slave"}
             params = "--node_name {} --ami_id {} --instance_type {} --key_name {} --security_group_ids {} " \
@@ -206,6 +203,6 @@ if __name__ == "__main__":
             try:
                 datalab.actions_lib.remove_ec2(data_engine['tag_name'], slave_name)
             except:
-                print("The slave instance {} hasn't been created.".format(slave_name))
+                logging.error("The slave instance {} hasn't been created.".format(slave_name))
         datalab.fab.append_result("Failed to create slave instances.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
index e8b3ecd..073ad77 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
@@ -25,16 +25,16 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 
 def start_data_engine(cluster_name):
-    print("Start Data Engine")
+    logging.info("Starting Data Engine")
     try:
         datalab.actions_lib.start_ec2(os.environ['conf_tag_resource_id'], cluster_name)
     except:
@@ -53,7 +53,7 @@ if __name__ == "__main__":
 
     # generating variables dictionary
     datalab.actions_lib.create_aws_config_files()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine = dict()
     
     try:
@@ -74,18 +74,16 @@ if __name__ == "__main__":
                                                           data_engine['computational_name'])
 
     logging.info('[START DATA ENGINE CLUSTER]')
-    print('[START DATA ENGINE CLUSTER]')
     try:
         start_data_engine("{}:{}".format(data_engine['service_base_name'],
                                          data_engine['cluster_name']))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         datalab.fab.append_result("Failed to start Data Engine.", str(err))
         sys.exit(1)
 
     try:
         logging.info('[UPDATE LAST ACTIVITY TIME]')
-        print('[UPDATE LAST ACTIVITY TIME]')
         data_engine['computational_id'] = data_engine['cluster_name'] + '-m'
         data_engine['tag_name'] = data_engine['service_base_name'] + '-tag'
         data_engine['notebook_ip'] = datalab.meta_lib.get_instance_ip_address(
@@ -109,7 +108,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Start Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_stop.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_stop.py
index 8f849ab..26f7e81 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_stop.py
@@ -24,9 +24,9 @@
 import datalab.actions_lib
 import datalab.fab
 import json
-import logging
 import os
 import sys
+from datalab.logger import logging
 
 
 def stop_data_engine(cluster_name):
@@ -38,18 +38,9 @@ def stop_data_engine(cluster_name):
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'],
-                                               os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + \
-                         os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     datalab.actions_lib.create_aws_config_files()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     data_engine_config = dict()
     try:
         data_engine_config['exploratory_name'] = os.environ['exploratory_name']
@@ -68,12 +59,11 @@ if __name__ == "__main__":
                                                                  data_engine_config['computational_name'])
 
     logging.info('[STOP DATA ENGINE CLUSTER]')
-    print('[STOP DATA ENGINE CLUSTER]')
     try:
         stop_data_engine("{}:{}".format(data_engine_config['service_base_name'],
                                         data_engine_config['cluster_name']))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         datalab.fab.append_result("Failed to stop Data Engine.", str(err))
         sys.exit(1)
 
@@ -81,7 +71,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine_config['service_base_name'],
                    "Action": "Stop Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
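Taken together, the converted scripts settle on one convention: progress messages go to logging.info, exception branches go to logging.error before append_result and sys.exit. A self-contained illustration of that convention (stdlib logging and a placeholder stop function stand in for datalab.logger and the real EC2 call; names here are examples, not DataLab APIs):

    # Illustration only -- stdlib logging stands in for the shared datalab.logger.
    import logging
    import sys

    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s', level=logging.DEBUG)

    def stop_data_engine(cluster_name):
        raise RuntimeError("example failure")       # stand-in for datalab.actions_lib.stop_ec2

    logging.info('[STOP DATA ENGINE CLUSTER]')      # progress messages -> logging.info
    try:
        stop_data_engine('sbn:demo-cluster')
    except Exception as err:
        logging.error('Error: {0}'.format(err))     # exception branches -> logging.error (was print)
        sys.exit(1)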
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_terminate.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_terminate.py
index f0bfe5a..4ecb377 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_terminate.py
@@ -24,22 +24,22 @@
 import datalab.actions_lib
 import datalab.fab
 import json
-import logging
 import os
 import sys
 import traceback
+from datalab.logger import logging
 
 
 def terminate_data_engine(tag_name, notebook_name,
                           os_user, key_path,
                           cluster_name, remote_kernel_name):
-    print("Terminating data engine cluster")
+    logging.info("Terminating data engine cluster")
     try:
         datalab.actions_lib.remove_ec2(os.environ['conf_tag_resource_id'], cluster_name)
     except:
         sys.exit(1)
 
-    print("Removing Data Engine kernels from notebook")
+    logging.info("Removing Data Engine kernels from notebook")
     try:
         datalab.actions_lib.remove_dataengine_kernels(tag_name, notebook_name,
                                                       os_user, key_path, remote_kernel_name)
@@ -57,7 +57,7 @@ if __name__ == "__main__":
                         level=logging.DEBUG,
                         filename=local_log_filepath)
     # generating variables dictionary
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     datalab.actions_lib.create_aws_config_files()
     data_engine = dict()
     
@@ -82,7 +82,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE DATA ENGINE]')
-        print('[TERMINATE DATA ENGINE]')
         try:
             terminate_data_engine(data_engine['tag_name'],
                                   data_engine['notebook_name'],
@@ -101,7 +100,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Terminate Data Engine"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
index 4c18945..d1d7c4e 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
@@ -26,12 +26,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--uuid', type=str, default='')
@@ -39,12 +39,6 @@ args = parser.parse_args()
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         notebook_config = dict()
         try:
@@ -113,7 +107,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -131,7 +124,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON DEEP LEARNING INSTANCE]')
-        print('[CONFIGURE PROXY ON DEEP LEARNING  INSTANCE]')
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -147,7 +139,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -166,7 +157,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO DEEPLEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}".format(
             instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
             edge_instance_private_ip)
@@ -182,7 +172,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
-        print('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
         params = "--hostname {0} --keyfile {1} " \
                  "--os_user {2} --jupyter_version {3} " \
                  "--scala_version {4} --spark_version {5} " \
@@ -204,7 +193,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -220,7 +208,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -236,7 +223,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -260,17 +246,17 @@ if __name__ == "__main__":
             datalab.fab.append_result("Failed edge reverse proxy template")
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         datalab.fab.append_result("Failed edge reverse proxy template.", str(err))
         datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING AMI]')
+            logging.info('[CREATING AMI]')
             ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '' and notebook_config['shared_image_enabled'] == 'false':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's the first time the notebook server is being configured. Creating image.")
                 try:
                     os.environ['conf_additional_tags'] = '{2};project_tag:{0};endpoint_tag:{1};'.format(
                         notebook_config['project_name'], notebook_config['endpoint_name'],
@@ -282,7 +268,7 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. Its ID is {}".format(image_id))
             else:
                 try:
                     os.environ['conf_additional_tags'] = '{};ami:shared;endpoint_tag:{};'.format(
@@ -294,7 +280,7 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. It's ID is {}".format(image_id))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
@@ -313,24 +299,23 @@ if __name__ == "__main__":
         tensorboard_access_url = "https://{}/{}-tensor/".format(notebook_config['edge_instance_hostname'],
                                                                 notebook_config['exploratory_name'])
         ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private DNS: {}".format(dns_name))
-        print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private DNS: {}".format(dns_name))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                              notebook_config['instance_name'])))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("AMI name: {}".format(notebook_config['notebook_image_name']))
-        print("Profile name: {}".format(notebook_config['role_profile_name']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("AMI name: {}".format(notebook_config['notebook_image_name']))
+        logging.info("Profile name: {}".format(notebook_config['role_profile_name']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
 
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
-        print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
+        logging.info('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:
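The [SUMMARY] blocks are converted one print at a time, which keeps the diff mechanical but leaves long runs of near-identical calls. An equivalent, more compact form would iterate over a dict of fields; this is a sketch only, not what the commit does, and the placeholder values stand in for the notebook_config entries used above:

    # Sketch only -- the commit keeps one logging.info(...) call per summary field.
    import logging
    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s', level=logging.INFO)

    summary = {
        "Instance name": "sbn-project-nb-demo",       # placeholder values
        "Private DNS": "ip-10-0-0-10.ec2.internal",
        "Private IP": "10.0.0.10",
    }
    for field, value in summary.items():
        logging.info("{}: {}".format(field, value))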
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_associate_elastic_ip.py b/infrastructure-provisioning/src/general/scripts/aws/edge_associate_elastic_ip.py
index 8f5eb71..5f97367 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_associate_elastic_ip.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_associate_elastic_ip.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--elastic_ip', type=str, default='')
@@ -36,15 +37,9 @@ parser.add_argument('--infra_tag_value', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         if args.elastic_ip == 'None':
-            print("Allocating Elastic IP")
+            logging.info("Allocating Elastic IP")
             allocation_id = allocate_elastic_ip()
             tag = {"Key": args.infra_tag_name, "Value": args.infra_tag_value}
             tag_name = {"Key": "Name", "Value": args.infra_tag_value}
@@ -53,8 +48,8 @@ if __name__ == "__main__":
         else:
             allocation_id = get_allocation_id_by_elastic_ip(args.elastic_ip)
 
-        print("Associating Elastic IP to Edge")
+        logging.info("Associating Elastic IP to Edge")
         associate_elastic_ip(args.edge_id, allocation_id)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py b/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
index d79023c..e688d3c 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
@@ -25,23 +25,15 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import uuid
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-
     def clear_resources():
         datalab.actions_lib.remove_all_iam_resources('notebook', edge_conf['project_name'])
         datalab.actions_lib.remove_all_iam_resources('edge', edge_conf['project_name'])
@@ -52,7 +44,7 @@ if __name__ == "__main__":
         datalab.actions_lib.remove_s3('edge', edge_conf['project_name'])
 
     try:
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         edge_conf = dict()
         edge_conf['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'][:20], '-', True)
@@ -136,7 +128,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             edge_conf['instance_hostname'], os.environ['conf_key_dir'] + os.environ['conf_key_name'] + ".pem",
             edge_conf['initial_user'], edge_conf['datalab_ssh_user'], edge_conf['sudo_group'])
@@ -152,7 +143,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING PREREQUISITES]')
         logging.info('[INSTALLING PREREQUISITES]')
         params = "--hostname {} --keyfile {} --user {} --region {}". \
             format(edge_conf['instance_hostname'], edge_conf['keyfile_name'], edge_conf['datalab_ssh_user'],
@@ -162,19 +152,19 @@ if __name__ == "__main__":
         except:
             traceback.print_exc()
             raise Exception
-        print('RESTARTING EDGE NODE')
+        logging.info('RESTARTING EDGE NODE')
         try:
-            print('Stoping EDGE node')
+            logging.info('Stopping EDGE node')
             datalab.actions_lib.stop_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             datalab.fab.append_result("Failed to stop edge.", str(err))
             sys.exit(1)
         try:
-            print('Starting EDGE node')
+            logging.info('Starting EDGE node')
             datalab.actions_lib.start_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             datalab.fab.append_result("Failed to start edge.", str(err))
             sys.exit(1)
     except Exception as err:
@@ -183,7 +173,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING HTTP PROXY]')
         logging.info('[INSTALLING HTTP PROXY]')
         additional_config = {"exploratory_subnet": edge_conf['private_subnet_cidr'],
                              "template_file": "/root/templates/squid.conf",
@@ -209,7 +198,6 @@ if __name__ == "__main__":
 
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": edge_conf['project_name'],
                              "user_keydir": os.environ['conf_key_dir'],
@@ -228,7 +216,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING NGINX REVERSE PROXY]')
         logging.info('[INSTALLING NGINX REVERSE PROXY]')
         edge_conf['keycloak_client_secret'] = str(uuid.uuid4())
         params = "--hostname {} --keyfile {} --user {} --keycloak_client_id {} --keycloak_client_secret {} " \
@@ -265,9 +252,9 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[CONFIGRING EDGE AS NAT]')
+        logging.info('[CONFIGURING EDGE AS NAT]')
         if os.environ['edge_is_nat'] == 'true':
-            print('Installing nftables')
+            logging.info('Installing nftables')
             additional_config = {"exploratory_subnet": edge_conf['private_subnet_cidr'],
                                  "edge_ip": edge_conf['edge_private_ip']}
             params = "--hostname {} --keyfile {} --additional_config '{}' --user {}".format(
@@ -284,21 +271,20 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(edge_conf['instance_name']))
-        print("Hostname: {}".format(edge_conf['instance_hostname']))
-        print("Public IP: {}".format(edge_conf['edge_public_ip']))
-        print("Private IP: {}".format(edge_conf['edge_private_ip']))
-        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(edge_conf['tag_name'],
+        logging.info("Instance name: {}".format(edge_conf['instance_name']))
+        logging.info("Hostname: {}".format(edge_conf['instance_hostname']))
+        logging.info("Public IP: {}".format(edge_conf['edge_public_ip']))
+        logging.info("Private IP: {}".format(edge_conf['edge_private_ip']))
+        logging.info("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(edge_conf['tag_name'],
                                                                              edge_conf['instance_name'])))
-        print("Key name: {}".format(edge_conf['key_name']))
-        print("Bucket name: {}".format(edge_conf['bucket_name']))
-        print("Shared bucket name: {}".format(edge_conf['shared_bucket_name']))
-        print("Notebook SG: {}".format(edge_conf['notebook_security_group_name']))
-        print("Notebook profiles: {}".format(edge_conf['notebook_role_profile_name']))
-        print("Edge SG: {}".format(edge_conf['edge_security_group_name']))
-        print("Notebook subnet: {}".format(edge_conf['private_subnet_cidr']))
+        logging.info("Key name: {}".format(edge_conf['key_name']))
+        logging.info("Bucket name: {}".format(edge_conf['bucket_name']))
+        logging.info("Shared bucket name: {}".format(edge_conf['shared_bucket_name']))
+        logging.info("Notebook SG: {}".format(edge_conf['notebook_security_group_name']))
+        logging.info("Notebook profiles: {}".format(edge_conf['notebook_role_profile_name']))
+        logging.info("Edge SG: {}".format(edge_conf['edge_security_group_name']))
+        logging.info("Notebook subnet: {}".format(edge_conf['private_subnet_cidr']))
         with open("/root/result.json", 'w') as result:
             res = {"hostname": edge_conf['instance_hostname'],
                    "public_ip": edge_conf['edge_public_ip'],
@@ -318,7 +305,7 @@ if __name__ == "__main__":
                    "project_name": edge_conf['project_name'],
                    "@class": "com.epam.datalab.dto.aws.edge.EdgeInfoAws",
                    "Action": "Create new EDGE server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results.", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_start.py b/infrastructure-provisioning/src/general/scripts/aws/edge_start.py
index 893fb89..fcb0019 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_start.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_start.py
@@ -25,21 +25,14 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     datalab.actions_lib.create_aws_config_files()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     edge_conf = dict()
     edge_conf['service_base_name'] = (os.environ['conf_service_base_name'])
     edge_conf['project_name'] = os.environ['project_name']
@@ -49,11 +42,10 @@ if __name__ == "__main__":
     edge_conf['tag_name'] = edge_conf['service_base_name'] + '-tag'
 
     logging.info('[START EDGE]')
-    print('[START EDGE]')
     try:
         datalab.actions_lib.start_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         datalab.fab.append_result("Failed to start edge.", str(err))
         sys.exit(1)
 
@@ -62,19 +54,18 @@ if __name__ == "__main__":
         addresses = datalab.meta_lib.get_instance_ip_address(edge_conf['tag_name'], edge_conf['instance_name'])
         ip_address = addresses.get('Private')
         public_ip_address = addresses.get('Public')
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(edge_conf['instance_name']))
-        print("Hostname: {}".format(instance_hostname))
-        print("Public IP: {}".format(public_ip_address))
-        print("Private IP: {}".format(ip_address))
+        logging.info("Instance name: {}".format(edge_conf['instance_name']))
+        logging.info("Hostname: {}".format(instance_hostname))
+        logging.info("Public IP: {}".format(public_ip_address))
+        logging.info("Private IP: {}".format(ip_address))
         with open("/root/result.json", 'w') as result:
             res = {"instance_name": edge_conf['instance_name'],
                    "hostname": instance_hostname,
                    "public_ip": public_ip_address,
                    "ip": ip_address,
                    "Action": "Start up notebook server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_status.py b/infrastructure-provisioning/src/general/scripts/aws/edge_status.py
index 591fc6d..7d96041 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_status.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_status.py
@@ -24,27 +24,19 @@
 import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
-import logging
 import os
 import sys
 import traceback
 from fabric import *
+from datalab.logger import logging
 import subprocess
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     datalab.actions_lib.create_aws_config_files()
-    print('Getting statuses of DataLab resources')
+    logging.info('Getting statuses of DataLab resources')
 
     try:
         logging.info('[COLLECT DATA]')
-        print('[COLLECTING DATA]')
         params = '--list_resources "{}"'.format(os.environ['edge_list_resources'])
         try:
             subprocess.run("~/scripts/{}.py {}".format('common_collect_data', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_stop.py b/infrastructure-provisioning/src/general/scripts/aws/edge_stop.py
index 062e8d3..e879f3e 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_stop.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_stop.py
@@ -25,21 +25,14 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     # generating variables dictionary
     datalab.actions_lib.create_aws_config_files()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     edge_conf = dict()
     edge_conf['service_base_name'] = (os.environ['conf_service_base_name'])
     edge_conf['project_name'] = os.environ['project_name']
@@ -49,7 +42,6 @@ if __name__ == "__main__":
     edge_conf['tag_name'] = edge_conf['service_base_name'] + '-tag'
 
     logging.info('[STOP EDGE]')
-    print('[STOP EDGE]')
     try:
         datalab.actions_lib.stop_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
     except Exception as err:
@@ -60,7 +52,7 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"instance_name": edge_conf['instance_name'],
                    "Action": "Stop edge server"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
index b040f03..0bc9503 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
@@ -26,12 +26,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--uuid', type=str, default='')
@@ -39,12 +39,6 @@ args = parser.parse_args()
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         notebook_config = dict()
         try:
@@ -113,7 +107,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -131,7 +124,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
-        print('[CONFIGURE PROXY ON JUPYTER INSTANCE]')
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}".format(
             instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -149,7 +141,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}".format(
             instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
             edge_instance_private_ip)
@@ -166,7 +157,6 @@ if __name__ == "__main__":
     # installing and configuring jupiter and all dependencies
     try:
         logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
-        print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         params = "--hostname {0} " \
                  "--keyfile {1} " \
                  "--region {2} " \
@@ -200,7 +190,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -217,7 +206,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -234,7 +222,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -250,7 +237,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -280,10 +266,10 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING AMI]')
+            logging.info('[CREATING AMI]')
             ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '' and notebook_config['shared_image_enabled'] == 'false':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's the first time the notebook server is being configured. Creating image.")
                 try:
                     os.environ['conf_additional_tags'] = '{2};project_tag:{0};endpoint_tag:{1};'.format(
                         os.environ['project_name'], os.environ['endpoint_name'], os.environ['conf_additional_tags'])
@@ -294,9 +280,9 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. Its ID is {}".format(image_id))
             else:
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's the first time the notebook server is being configured. Creating image.")
                 try:
                     os.environ['conf_additional_tags'] = '{};ami:shared;endpoint_tag:{};'.format(
                         os.environ['conf_additional_tags'], os.environ['endpoint_name'])
@@ -306,7 +292,7 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. Its ID is {}".format(image_id))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
@@ -324,27 +310,26 @@ if __name__ == "__main__":
         jupyter_ungit_access_url = "https://{}/{}-ungit/".format(notebook_config['edge_instance_hostname'],
                                                                  notebook_config['exploratory_name'])
         ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private DNS: {}".format(dns_name))
-        print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private DNS: {}".format(dns_name))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                              notebook_config['instance_name'])))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("Image name: {}".format(notebook_config['notebook_image_name']))
-        print("Profile name: {}".format(notebook_config['role_profile_name']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Jupyter URL: {}".format(jupyter_dns_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print("ReverseProxyNotebook".format(jupyter_notebook_access_url))
-        print("ReverseProxyUngit".format(jupyter_ungit_access_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("Image name: {}".format(notebook_config['notebook_image_name']))
+        logging.info("Profile name: {}".format(notebook_config['role_profile_name']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Jupyter URL: {}".format(jupyter_dns_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info("ReverseProxyNotebook".format(jupyter_notebook_access_url))
+        logging.info("ReverseProxyUngit".format(jupyter_ungit_access_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
-        print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
+        logging.info('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
index ca5277f..9a9450d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
@@ -24,6 +24,7 @@
 import argparse
 import sys
 import subprocess
+from datalab.logger import logging
 #from datalab.actions_lib import *
 #from datalab.common_lib import *
 #from datalab.fab import *
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
index 91a93e8..0efb00f 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
@@ -28,6 +28,7 @@ import time
 import subprocess
 from datalab.meta_lib import *
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--bucket', type=str, default='')
@@ -117,15 +118,15 @@ if __name__ == "__main__":
                             '/tmp/python_version')
     with open('/tmp/scala_version') as f:
         args.scala_version = str(f.read()).rstrip()
-        print(args.scala_version)
+        logging.info(args.scala_version)
     with open('/tmp/python_version') as f:
         args.python_version = str(f.read()).rstrip()
-        print(args.python_version)
+        logging.info(args.python_version)
     if r_enabled == 'true':
         s3_client.download_file(args.bucket, args.project_name + '/' + args.cluster_name + '/r_version', '/tmp/r_version')
         with open('/tmp/r_version') as g:
             args.r_version = str(g.read()).rstrip()
-            print(args.r_version)
+            logging.info(args.r_version)
     else:
         r_version = 'false'
     cluster_id = get_emr_id_by_name(args.cluster_name)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
index 425c08d..6fe68e3 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
@@ -26,12 +26,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--uuid', type=str, default='')
@@ -39,12 +39,6 @@ args = parser.parse_args()
 
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         notebook_config = dict()
         try:
@@ -113,7 +107,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format \
             (instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
              notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -131,7 +124,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON JUPYTERLAB INSTANCE]')
-        print('[CONFIGURE PROXY ON JUPYTERLAB INSTANCE]')
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -149,7 +141,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO JUPYTERLAB NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO JUPYTERLAB NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'])
         try:
@@ -165,7 +156,6 @@ if __name__ == "__main__":
     # installing and configuring jupiter and all dependencies
     try:
         logging.info('[CONFIGURE JUPYTERLAB NOTEBOOK INSTANCE]')
-        print('[CONFIGURE JUPYTERLAB NOTEBOOK INSTANCE]')
         params = "--hostname {} " \
                  "--keyfile {} " \
                  "--edge_ip {} " \
@@ -199,7 +189,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -216,7 +205,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -233,7 +221,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -249,7 +236,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -279,22 +265,21 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING AMI]')
+            logging.info('[CREATING AMI]')
             ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 image_id = datalab.actions_lib.create_image_from_instance(
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. It's ID is {}".format(image_id))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
             sys.exit(1)
 
     try:
-        print('[CONFIGURING PROXY FOR DOCKER]')
         logging.info('[CONFIGURING PROXY FOR DOCKER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -313,7 +298,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[STARTING JUPYTER CONTAINER]')
         logging.info('[STARTING JUPYTER CONTAINER]')
         params = "--hostname {} " \
                  "--keyfile {} " \
@@ -343,27 +327,27 @@ if __name__ == "__main__":
         jupyter_ungit_acces_url = "http://{}/{}-ungit/".format(notebook_config['edge_instance_hostname'],
                                                                notebook_config['exploratory_name'])
         ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private DNS: {}".format(dns_name))
-        print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private DNS: {}".format(dns_name))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                              notebook_config['instance_name'])))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("Image name: {}".format(notebook_config['notebook_image_name']))
-        print("Profile name: {}".format(notebook_config['role_profile_name']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("JupyterLab URL: {}".format(jupyter_ip_url))
-        print("JupyterLab URL: {}".format(jupyter_dns_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print("ReverseProxyNotebook".format(jupyter_notebook_acces_url))
-        print("ReverseProxyUngit".format(jupyter_ungit_acces_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("Image name: {}".format(notebook_config['notebook_image_name']))
+        logging.info("Profile name: {}".format(notebook_config['role_profile_name']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("JupyterLab URL: {}".format(jupyter_ip_url))
+        logging.info("JupyterLab URL: {}".format(jupyter_dns_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info("ReverseProxyNotebook".format(jupyter_notebook_acces_url))
+        logging.info("ReverseProxyUngit".format(jupyter_ungit_acces_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
-        print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
+        logging.info('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py b/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py
index 64a67a8..f40e33b 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/odahu_deploy.py
@@ -21,25 +21,18 @@
 #
 # ******************************************************************************
 
-import logging
 import json
 import sys
 from datalab.fab import *
 from datalab.meta_lib import *
 from datalab.actions_lib import *
+from datalab.logger import logging
 import os
 import base64
 import subprocess
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/project/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     odahu_conf = dict()
     odahu_conf['allowed_cidr'] = os.environ['odahu_allowed_cidr'].split(',')
     odahu_conf['project_id'] = (os.environ['gcp_project_id'])
@@ -105,7 +98,7 @@ if __name__ == "__main__":
     odahu_conf['tester_secret'] = os.environ['odahu_tester_secret']
     odahu_conf['tester-data-scientist_secret'] = os.environ['odahu_tester_data_scientist_secret']
 
-    print('Preparing parameters file')
+    logging.info('Preparing parameters file')
     try:
         subprocess.run("cp /root/templates/profile.json /tmp/", shell=True, check=True)
         with open("/tmp/profile.json", 'w') as profile:
@@ -277,13 +270,13 @@ if __name__ == "__main__":
         sys.exit(1)
 
     # generating output information
-    print('[SUMMARY]')
     logging.info('[SUMMARY]')
-    print('Cluster name: {}'.format(odahu_conf['cluster_name']))
+    logging.info('Cluster name: {}'.format(odahu_conf['cluster_name']))
     with open('/tmp/result.json', 'r') as f:
         output = json.load(f)
         odahu_urls = json.dumps(output['odahu_urls']['value'], sort_keys=True, indent=4)
-    print('Odahu urls: {}'.format(odahu_urls))
+    logging.info('Odahu urls: {}'.format(odahu_urls))
     res = dict()
     res['odahu_urls'] = output['odahu_urls']['value']
     res['oauth_cookie_secret'] = odahu_conf['oauth_cookie_secret']
diff --git a/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py
index 4a8e9e7..61018b3 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/odahu_prepare.py
@@ -21,7 +21,6 @@
 #
 # ******************************************************************************
 
-import logging
 import json
 import sys
 import requests
@@ -29,17 +28,11 @@ import subprocess
 from datalab.fab import *
 from datalab.meta_lib import *
 from datalab.actions_lib import *
+from datalab.logger import logging
 import os
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/project/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     odahu_conf = dict()
     odahu_conf['service_base_name'] = (os.environ['conf_service_base_name']).lower().replace('_', '-')
     odahu_conf['project_name'] = (os.environ['project_name']).lower().replace('_', '-')
@@ -68,12 +61,11 @@ if __name__ == "__main__":
         odahu_conf['bucket_additional_tags'] = ''
         os.environ['conf_additional_tags'] = 'project_tag:{0};endpoint_tag:{1}'.format(odahu_conf['project_tag'],
                                                                                        odahu_conf['endpoint_tag'])
-    print('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
+    logging.info('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
 
 
     try:
         logging.info('[CREATE STATE BUCKETS]')
-        print('[CREATE STATE BUCKETS]')
 
         odahu_conf['bucket_tags'] = 'endpoint_tag:{0};{1}:{2};project_tag:{3};{4}:{5}{6}'\
             .format(odahu_conf['endpoint_tag'], os.environ['conf_billing_tag_key'], os.environ['conf_billing_tag_value'],
@@ -88,29 +80,27 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Unable to create bucket.", str(err))
         sys.exit(1)
 
     try:
         logging.info('[CREATE NAT GATEWAY]')
-        print('[CREATE NAT GATEWAY]')
-        print("Allocating Elastic IP")
+        logging.info("Allocating Elastic IP")
         allocation_id = allocate_elastic_ip()
         tag = {"Key": odahu_conf['tag_name'], "Value": odahu_conf['static_address_name']}
         tag_name = {"Key": "Name", "Value": odahu_conf['static_address_name']}
         create_tag(allocation_id, tag)
         create_tag(allocation_id, tag_name)
-        print("Creating NAT")
+        logging.info("Creating NAT")
         create_nat_gatway(allocation_id, odahu_conf['ssn_subnet_id'], odahu_conf['project_tag'])
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Unable to Unable to create NAT Gateway.", str(err))
         remove_s3(bucket_type='odahu')
         sys.exit(1)
 
     try:
-        print('[CONFIGURE REDIRECT URI]')
         logging.info('[CONFIGURE REDIRECT URI]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             odahu_conf['keycloak_auth_server_url'])
@@ -152,7 +142,7 @@ if __name__ == "__main__":
             append_result("Failed to configure keycloak.")
             sys.exit(1)
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to configure keycloak.", str(err))
         remove_s3(bucket_type='odahu')
         release_elastic_ip(allocation_id)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
index 8fa8836..aa271cd 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/project_prepare.py
@@ -26,25 +26,18 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import time
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/project/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         datalab.actions_lib.create_aws_config_files()
-        print('Generating infrastructure names and tags')
+        logging.info('Generating infrastructure names and tags')
         project_conf = dict()
         project_conf['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
             os.environ['conf_service_base_name'][:20], '-', True)
@@ -141,12 +134,12 @@ if __name__ == "__main__":
                 subprocess.run('echo "{0}" >> {1}{2}.pub'.format(project_conf['user_key'], os.environ['conf_key_dir'],
                                                         project_conf['project_name']), shell=True, check=True)
             except:
-                print("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
+                logging.error("ADMINSs PUBLIC KEY DOES NOT INSTALLED")
         except KeyError:
-            print("ADMINSs PUBLIC KEY DOES NOT UPLOADED")
+            logging.error("ADMINSs PUBLIC KEY DOES NOT UPLOADED")
             sys.exit(1)
 
-        print("Will create exploratory environment with edge node as access point as following: {}".
+        logging.info("Will create exploratory environment with edge node as access point as following: {}".
               format(json.dumps(project_conf, sort_keys=True, indent=4, separators=(',', ': '))))
         logging.info(json.dumps(project_conf))
 
@@ -159,7 +152,7 @@ if __name__ == "__main__":
             project_conf['bucket_additional_tags'] = ''
             os.environ['conf_additional_tags'] = 'project_tag:{0};endpoint_tag:{1}'.format(project_conf['project_tag'],
                                                                                            project_conf['endpoint_tag'])
-        print('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
+        logging.info('Additional tags will be added: {}'.format(os.environ['conf_additional_tags']))
     except Exception as err:
         datalab.fab.append_result("Failed to generate variables dictionary.", str(err))
         sys.exit(1)
@@ -169,13 +162,13 @@ if __name__ == "__main__":
         try:
             endpoint_id = datalab.meta_lib.get_instance_by_name(project_conf['tag_name'], '{0}-{1}-endpoint'.format(
                 project_conf['service_base_name'], project_conf['endpoint_name']))
-            print("Endpoint id: " + endpoint_id)
+            logging.info("Endpoint id: " + endpoint_id)
             ec2 = boto3.client('ec2')
             ec2.create_tags(Resources=[endpoint_id], Tags=[
                 {'Key': 'project_tag', 'Value': project_conf['project_tag']},
                 {'Key': 'endpoint_tag', 'Value': project_conf['endpoint_tag']}])
         except Exception as err:
-            print("Failed to attach Project tag to Endpoint", str(err))
+            logging.error("Failed to attach Project tag to Endpoint", str(err))
             traceback.print_exc()
             sys.exit(1)
 
@@ -189,7 +182,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SUBNET]')
-        print('[CREATE SUBNET]')
         params = "--vpc_id '{}' --infra_tag_name {} --infra_tag_value {} --prefix {} " \
                  "--user_subnets_range '{}' --subnet_name {} --zone {}".format(
                   project_conf['vpc2_id'], project_conf['tag_name'], project_conf['service_base_name'],
@@ -210,12 +202,11 @@ if __name__ == "__main__":
                                                 project_conf['endpoint_name'])}
     project_conf['private_subnet_cidr'] = datalab.meta_lib.get_subnet_by_tag(tag)
     subnet_id = datalab.meta_lib.get_subnet_by_cidr(project_conf['private_subnet_cidr'], project_conf['vpc2_id'])
-    print('Subnet id: {}'.format(subnet_id))
-    print('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
+    logging.info('Subnet id: {}'.format(subnet_id))
+    logging.info('NEW SUBNET CIDR CREATED: {}'.format(project_conf['private_subnet_cidr']))
 
     try:
         logging.info('[CREATE EDGE ROLES]')
-        print('[CREATE EDGE ROLES]')
         user_tag = "{0}:{0}-{1}-{2}-edge-role".format(project_conf['service_base_name'], project_conf['project_name'],
                                                       project_conf['endpoint_name'])
         params = "--role_name {} --role_profile_name {} --policy_name {} --region {} --infra_tag_name {} " \
@@ -236,7 +227,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE BACKEND (NOTEBOOK) ROLES]')
-        print('[CREATE BACKEND (NOTEBOOK) ROLES]')
         user_tag = "{0}:{0}-{1}-{2}-nb-de-role".format(project_conf['service_base_name'], project_conf['project_name'],
                                                        project_conf['endpoint_name'])
         params = "--role_name {} --role_profile_name {} --policy_name {} --region {} --infra_tag_name {} " \
@@ -263,7 +253,6 @@ if __name__ == "__main__":
     except KeyError:
         try:
             logging.info('[CREATE SECURITY GROUP FOR EDGE NODE]')
-            print('[CREATE SECURITY GROUPS FOR EDGE]')
             edge_sg_ingress = datalab.meta_lib.format_sg([
                 {
                     "IpProtocol": "-1",
@@ -429,7 +418,7 @@ if __name__ == "__main__":
                 datalab.fab.append_result("Failed creating security group for edge node.", str(err))
                 raise Exception
 
-            print('Waiting for changes to propagate')
+            logging.info('Waiting for changes to propagate')
             time.sleep(10)
         except:
             datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
@@ -438,7 +427,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
-        print('[CREATE SECURITY GROUP FOR PRIVATE SUBNET]')
         rules_list = []
         sg_list = project_conf['sg_ids'].replace(" ", "").split(',')
         if os.environ['aws_security_groups_ids'] == '':
@@ -509,7 +497,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
 
-        print('Waiting for changes to propagate')
+        logging.info('Waiting for changes to propagate')
         time.sleep(10)
     except Exception as err:
         datalab.fab.append_result("Failed creating security group for private subnet.", str(err))
@@ -520,7 +508,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR MASTER NODE]')
-    print("[CREATING SECURITY GROUPS FOR MASTER NODE]")
     try:
         params = "--name {} --vpc_id {} --security_group_rules '{}' --egress '{}' --infra_tag_name {} " \
                  "--infra_tag_value {} --force {}".format(project_conf['dataengine_master_security_group_name'],
@@ -542,7 +529,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     logging.info('[CREATING SECURITY GROUPS FOR SLAVE NODES]')
-    print("[CREATING SECURITY GROUPS FOR SLAVE NODES]")
     try:
         params = "--name {} --vpc_id {} --security_group_rules '{}' --egress '{}' --infra_tag_name {} " \
                  "--infra_tag_value {} --force {}".format(project_conf['dataengine_slave_security_group_name'],
@@ -566,7 +552,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATE BUCKETS]')
-        print('[CREATE BUCKETS]')
         project_conf['shared_bucket_tags'] = 'endpoint_tag:{0};{1}:{2};{3}:{4}{5}'.format(
             project_conf['endpoint_tag'], os.environ['conf_billing_tag_key'], os.environ['conf_billing_tag_value'],
             project_conf['tag_name'], project_conf['shared_bucket_name'],
@@ -602,7 +587,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[CREATING BUCKET POLICY FOR PROJECT INSTANCES]')
-        print('[CREATING BUCKET POLICY FOR USER INSTANCES]')
         params = '--bucket_name {} --shared_bucket_name {} --username {} --edge_role_name {} ' \
                  '--notebook_role_name {} --service_base_name {} --region {} ' \
                  '--user_predefined_s3_policies "{}" --endpoint_name {}'.format(
@@ -631,7 +615,6 @@ if __name__ == "__main__":
         else:
             edge_group_id = os.environ['aws_security_groups_ids']
         logging.info('[CREATE EDGE INSTANCE]')
-        print('[CREATE EDGE INSTANCE]')
         params = "--node_name {} --ami_id {} --instance_type {} --key_name {} --security_group_ids {} " \
                  "--subnet_id {} --iam_profile {} --infra_tag_name {} --infra_tag_value {}" \
             .format(project_conf['edge_instance_name'], project_conf['ami_id'], project_conf['instance_size'],
@@ -664,7 +647,6 @@ if __name__ == "__main__":
     if project_conf['network_type'] == 'public':
         try:
             logging.info('[ASSOCIATING ELASTIC IP]')
-            print('[ASSOCIATING ELASTIC IP]')
             project_conf['edge_id'] = datalab.meta_lib.get_instance_by_name(project_conf['tag_name'],
                                                                             project_conf['edge_instance_name'])
             try:
@@ -687,7 +669,7 @@ if __name__ == "__main__":
                 project_conf['allocation_id'] = datalab.meta_lib.get_allocation_id_by_elastic_ip(
                     project_conf['edge_public_ip'])
             except:
-                print("No Elastic IPs to release!")
+                logging.error("No Elastic IPs to release!")
             datalab.actions_lib.remove_ec2(project_conf['tag_name'], project_conf['edge_instance_name'])
             datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
             datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
@@ -699,7 +681,7 @@ if __name__ == "__main__":
 
     if os.environ['edge_is_nat'] == 'true':
         try:
-            print('[CONFIGURING ROUTE TABLE FOR NAT]')
+            logging.info('[CONFIGURING ROUTE TABLE FOR NAT]')
             project_conf['nat_rt_name'] = '{0}-{1}-{2}-nat-rt'.format(project_conf['service_base_name'],
                                                                               project_conf['project_name'],
                                                                               project_conf['endpoint_name'])
@@ -718,7 +700,7 @@ if __name__ == "__main__":
                 project_conf['allocation_id'] = datalab.meta_lib.get_allocation_id_by_elastic_ip(
                     project_conf['edge_public_ip'])
             except:
-                print("No Elastic IPs to release!")
+                logging.error("No Elastic IPs to release!")
             datalab.actions_lib.remove_ec2(project_conf['tag_name'], project_conf['edge_instance_name'])
             datalab.actions_lib.remove_all_iam_resources('notebook', project_conf['project_name'])
             datalab.actions_lib.remove_all_iam_resources('edge', project_conf['project_name'])
diff --git a/infrastructure-provisioning/src/general/scripts/aws/project_terminate.py b/infrastructure-provisioning/src/general/scripts/aws/project_terminate.py
index 16fcf23..213c0ba 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/project_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/project_terminate.py
@@ -26,16 +26,16 @@ import datalab.actions_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
 import os
 import requests
 import sys
 import traceback
+from datalab.logger import logging
 
 
 def terminate_edge_node(tag_name, project_name, tag_value, nb_sg, edge_sg, de_sg, emr_sg, endpoint_name,
                         service_base_name):
-    print('Terminating EMR cluster')
+    logging.info('Terminating EMR cluster')
     try:
         clusters_list = datalab.meta_lib.get_emr_list(tag_name)
         if clusters_list:
@@ -46,28 +46,28 @@ def terminate_edge_node(tag_name, project_name, tag_value, nb_sg, edge_sg, de_sg
                 emr_name = cluster.get('Name')
                 if '{}'.format(tag_value[:-1]) in emr_name:
                     datalab.actions_lib.terminate_emr(cluster_id)
-                    print("The EMR cluster {} has been terminated successfully".format(emr_name))
+                    logging.info("The EMR cluster {} has been terminated successfully".format(emr_name))
         else:
-            print("There are no EMR clusters to terminate.")
+            logging.info("There are no EMR clusters to terminate.")
     except Exception as err:
         datalab.fab.append_result("Failed to terminate EMR cluster.", str(err))
         sys.exit(1)
 
-    print("Terminating EDGE and notebook instances")
+    logging.info("Terminating EDGE and notebook instances")
     try:
         datalab.actions_lib.remove_ec2(tag_name, tag_value)
     except Exception as err:
         datalab.fab.append_result("Failed to terminate instances.", str(err))
         sys.exit(1)
 
-    print("Removing s3 bucket")
+    logging.info("Removing s3 bucket")
     try:
         datalab.actions_lib.remove_s3('edge', project_name)
     except Exception as err:
         datalab.fab.append_result("Failed to remove buckets.", str(err))
         sys.exit(1)
 
-    print("Removing IAM roles and profiles")
+    logging.info("Removing IAM roles and profiles")
     try:
         datalab.actions_lib.remove_all_iam_resources('notebook', project_name, endpoint_name)
         datalab.actions_lib.remove_all_iam_resources('edge', project_name, endpoint_name)
@@ -75,14 +75,14 @@ def terminate_edge_node(tag_name, project_name, tag_value, nb_sg, edge_sg, de_sg
         datalab.fab.append_result("Failed to remove IAM roles and profiles.", str(err))
         sys.exit(1)
 
-    print("Deregistering project specific notebook's AMI")
+    logging.info("Deregistering project specific notebook's AMI")
     try:
         datalab.actions_lib.deregister_image('{}-{}-{}-*'.format(service_base_name, project_name, endpoint_name))
     except Exception as err:
         datalab.fab.append_result("Failed to deregister images.", str(err))
         sys.exit(1)
 
-    print("Removing security groups")
+    logging.info("Removing security groups")
     try:
         datalab.actions_lib.remove_sgroups(emr_sg)
         datalab.actions_lib.remove_sgroups(de_sg)
@@ -92,14 +92,14 @@ def terminate_edge_node(tag_name, project_name, tag_value, nb_sg, edge_sg, de_sg
         datalab.fab.append_result("Failed to remove Security Groups.", str(err))
         sys.exit(1)
 
-    print("Removing private subnet")
+    logging.info("Removing private subnet")
     try:
         datalab.actions_lib.remove_subnets(tag_value)
     except Exception as err:
         datalab.fab.append_result("Failed to remove subnets.", str(err))
         sys.exit(1)
 
-    print("Removing project route tables")
+    logging.info("Removing project route tables")
     try:
         datalab.actions_lib.remove_route_tables("Name", False, '{}-{}-{}-nat-rt'.format(service_base_name, project_name, endpoint_name))
     except Exception as err:
@@ -116,7 +116,7 @@ if __name__ == "__main__":
 
     # generating variables dictionary
     datalab.actions_lib.create_aws_config_files()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     project_conf = dict()
     project_conf['service_base_name'] = (os.environ['conf_service_base_name'])
     project_conf['project_name'] = os.environ['project_name']
@@ -142,7 +142,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE PROJECT]')
-        print('[TERMINATE PROJECT]')
         try:
             terminate_edge_node(project_conf['tag_name'], project_conf['project_name'], project_conf['tag_value'],
                                 project_conf['nb_sg'], project_conf['edge_sg'], project_conf['de_sg'],
@@ -151,22 +150,21 @@ if __name__ == "__main__":
             traceback.print_exc()
             datalab.fab.append_result("Failed to terminate project.", str(err))
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         sys.exit(1)
 
     try:
         endpoint_id = datalab.meta_lib.get_instance_by_name(project_conf['tag_name'],
                                                             project_conf['endpoint_instance_name'])
-        print("Endpoint id: " + endpoint_id)
+        logging.info("Endpoint id: " + endpoint_id)
         ec2 = boto3.client('ec2')
         ec2.delete_tags(Resources=[endpoint_id], Tags=[{'Key': 'project_tag'}, {'Key': 'endpoint_tag'}])
     except Exception as err:
-        print("Failed to remove Project tag from Enpoint", str(err))
+        logging.error("Failed to remove Project tag from Enpoint", str(err))
 #        traceback.print_exc()
 #        sys.exit(1)
 
     try:
-        print('[KEYCLOAK PROJECT CLIENT DELETE]')
         logging.info('[KEYCLOAK PROJECT CLIENT DELETE]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             os.environ['keycloak_auth_server_url'])
@@ -202,14 +200,14 @@ if __name__ == "__main__":
             headers={"Authorization": "Bearer {}".format(keycloak_token.get("access_token")),
                      "Content-Type": "application/json"})
     except Exception as err:
-        print("Failed to remove project client from Keycloak", str(err))
+        logging.error("Failed to remove project client from Keycloak", str(err))
 
     try:
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": project_conf['service_base_name'],
                    "project_name": project_conf['project_name'],
                    "Action": "Terminate edge node"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
index 3ad6aca..1f77291 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
@@ -26,12 +26,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--uuid', type=str, default='')
@@ -115,7 +115,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -133,7 +132,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON R_STUDIO INSTANCE]')
-        print('[CONFIGURE PROXY ON R_STUDIO INSTANCE]')
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -151,7 +149,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO R_STUDIO NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO R_STUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
                    edge_instance_private_ip)
@@ -168,7 +165,6 @@ if __name__ == "__main__":
     # installing and configuring R_STUDIO and all dependencies
     try:
         logging.info('[CONFIGURE R_STUDIO NOTEBOOK INSTANCE]')
-        print('[CONFIGURE R_STUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {0}  --keyfile {1} " \
                  "--region {2} --rstudio_pass {3} " \
                  "--rstudio_version {4} --os_user {5} " \
@@ -189,7 +185,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -206,7 +201,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -222,7 +216,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -238,7 +231,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -259,10 +251,10 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING AMI]')
+            logging.info('[CREATING AMI]')
             ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '' and notebook_config['shared_image_enabled'] == 'false':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 try:
                     os.environ['conf_additional_tags'] = '{2};project_tag:{0};endpoint_tag:{1};'.format(
                         os.environ['project_name'], os.environ['endpoint_name'], os.environ['conf_additional_tags'])
@@ -273,7 +265,7 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. It's ID is {}".format(image_id))
             else:
                 try:
                     os.environ['conf_additional_tags'] = '{};ami:shared;endpoint_tag:{};'.format(
@@ -285,7 +277,7 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. It's ID is {}".format(image_id))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
@@ -303,27 +295,27 @@ if __name__ == "__main__":
         rstudio_ungit_access_url = "https://{}/{}-ungit/".format(notebook_config['edge_instance_hostname'],
                                                                  notebook_config['exploratory_name'])
         ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private DNS: {}".format(dns_name))
-        print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private DNS: {}".format(dns_name))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                              notebook_config['instance_name'])))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("AMI name: {}".format(notebook_config['notebook_image_name']))
-        print("Profile name: {}".format(notebook_config['role_profile_name']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Rstudio URL: {}".format(rstudio_ip_url))
-        print("Rstudio URL: {}".format(rstudio_dns_url))
-        print("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
-        print("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("AMI name: {}".format(notebook_config['notebook_image_name']))
+        logging.info("Profile name: {}".format(notebook_config['role_profile_name']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Rstudio URL: {}".format(rstudio_ip_url))
+        logging.info("Rstudio URL: {}".format(rstudio_dns_url))
+        logging.info("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
+        logging.info("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
-        print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
+        logging.info('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py
index 6ab945a..e340b46 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_dataengine-service_create_configs.py
@@ -30,6 +30,7 @@ from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--bucket', type=str, default='')
@@ -64,7 +65,7 @@ def configure_rstudio():
                 subprocess.run("sed -i '/DRFA/s/^/#/' " + spark_dir + "conf/log4j.properties", shell=True, check=True)
             subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine-service_ensured', shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
@@ -81,7 +82,7 @@ def configure_rstudio():
             if args.emr_version == "emr-5.19.0":
                 subprocess.run("sed -i '/DRFA/s/^/#/' " + spark_dir + "conf/log4j.properties", shell=True, check=True)
         except Exception as err:
-            print('Error: {0}'.format(err))
+            logging.error('Error: {0}'.format(err))
             sys.exit(1)
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate.py
index 0c0e624..7167cd9 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate.py
@@ -23,25 +23,20 @@
 
 import datalab.ssn_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}.log".format(os.environ['conf_resource'], os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     # generating variables dictionary
     if 'aws_access_key' in os.environ and 'aws_secret_access_key' in os.environ:
         datalab.actions_lib.create_aws_config_files(generate_full_config=True)
     else:
         datalab.actions_lib.create_aws_config_files()
-    print('Generating infrastructure names and tags')
+    logging.info('Generating infrastructure names and tags')
     ssn_conf = dict()
     ssn_conf['service_base_name'] = os.environ['conf_service_base_name'] = datalab.fab.replace_multi_symbols(
         os.environ['conf_service_base_name'][:20], '-', True)
@@ -53,7 +48,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[TERMINATE SSN]')
-        print('[TERMINATE SSN]')
         params = "--tag_name {} --edge_sg {} --nb_sg {} --de_sg {} --service_base_name {} --de_se_sg {}". \
                  format(ssn_conf['tag_name'], ssn_conf['edge_sg'], ssn_conf['nb_sg'], ssn_conf['de_sg'],
                         ssn_conf['service_base_name'], ssn_conf['de-service_sg'])
@@ -63,12 +57,12 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         datalab.fab.append_result("Failed to terminate ssn.", str(err))
         sys.exit(1)
 
     try:
-        print('[KEYCLOAK SSN CLIENT DELETE]')
         logging.info('[KEYCLOAK SSN CLIENT DELETE]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             os.environ['keycloak_auth_server_url'])
@@ -103,13 +97,13 @@ if __name__ == "__main__":
             headers={"Authorization": "Bearer {}".format(keycloak_token.get("access_token")),
                      "Content-Type": "application/json"})
     except Exception as err:
-        print("Failed to remove ssn client from Keycloak", str(err))
+        logging.error("Failed to remove ssn client from Keycloak", str(err))
 
     try:
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": ssn_conf['service_base_name'],
                    "Action": "Terminate ssn with all service_base_name environment"}
-            print(json.dumps(res))
+            logging.info(json.dumps(res))
             result.write(json.dumps(res))
     except Exception as err:
         datalab.fab.append_result("Error with writing results", str(err))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate_aws_resources.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate_aws_resources.py
index a3fbb38..cc529a4 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate_aws_resources.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_terminate_aws_resources.py
@@ -23,7 +23,6 @@
 
 import argparse
 import boto3
-import logging
 import datalab.ssn_lib
 from datalab.logger import logging
 import os
diff --git a/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
index 46fdeb7..83f942d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
@@ -26,12 +26,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--uuid', type=str, default='')
@@ -39,12 +39,6 @@ args = parser.parse_args()
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         notebook_config = dict()
         try:
@@ -114,7 +108,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -132,7 +125,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON TENSORFLOW-RSTUDIO INSTANCE]')
-        print('[CONFIGURE PROXY ON TENSORFLOW-RSTUDIO INSTANCE]')
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -150,7 +142,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
                    edge_instance_private_ip)
@@ -167,7 +158,6 @@ if __name__ == "__main__":
     # installing and configuring TensorFlow and RSTUDIO and all dependencies
     try:
         logging.info('[CONFIGURE TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
-        print('[CONFIGURE TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
         params = "--hostname {0}  --keyfile {1} " \
                  "--region {2} --rstudio_pass {3} " \
                  "--rstudio_version {4} --os_user {5} " \
@@ -188,7 +178,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -205,7 +194,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -221,7 +209,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -237,7 +224,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -258,10 +244,10 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING AMI]')
+            logging.info('[CREATING AMI]')
             ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '' and notebook_config['shared_image_enabled'] == 'false':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 try:
                     os.environ['conf_additional_tags'] = '{2};project_tag:{0};endpoint_tag:{1};'.format(
                         os.environ['project_name'], os.environ['endpoint_name'], os.environ['conf_additional_tags'])
@@ -272,9 +258,9 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. It's ID is {}".format(image_id))
             else:
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 try:
                     os.environ['conf_additional_tags'] = '{};ami:shared;endpoint_tag:{};'.format(
                         os.environ['conf_additional_tags'], os.environ['endpoint_name'])
@@ -285,7 +271,7 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. It's ID is {}".format(image_id))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
@@ -306,29 +292,28 @@ if __name__ == "__main__":
         rstudio_ungit_access_url = "https://{}/{}-ungit/".format(notebook_config['edge_instance_hostname'],
                                                                  notebook_config['exploratory_name'])
         ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private DNS: {}".format(dns_name))
-        print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private DNS: {}".format(dns_name))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                              notebook_config['instance_name'])))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("AMI name: {}".format(notebook_config['notebook_image_name']))
-        print("Profile name: {}".format(notebook_config['role_profile_name']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("TensorBoard URL: {}".format(tensorboard_url))
-        print("TensorBoard log dir: /var/log/tensorboard")
-        print("Rstudio URL: {}".format(rstudio_ip_url))
-        print("Rstudio URL: {}".format(rstudio_dns_url))
-        print("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
-        print("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("AMI name: {}".format(notebook_config['notebook_image_name']))
+        logging.info("Profile name: {}".format(notebook_config['role_profile_name']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("TensorBoard URL: {}".format(tensorboard_url))
+        logging.info("TensorBoard log dir: /var/log/tensorboard")
+        logging.info("Rstudio URL: {}".format(rstudio_ip_url))
+        logging.info("Rstudio URL: {}".format(rstudio_dns_url))
+        logging.info("Rstudio user: {}".format(notebook_config['datalab_ssh_user']))
+        logging.info("Rstudio pass: {}".format(notebook_config['rstudio_pass']))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
-        print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py
index a77ce0f..2a0d115 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py
@@ -26,12 +26,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--uuid', type=str, default='')
@@ -39,12 +39,6 @@ args = parser.parse_args()
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         notebook_config = dict()
         try:
@@ -113,7 +107,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -131,7 +124,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON TENSOR INSTANCE]')
-        print('[CONFIGURE PROXY ON TENSOR INSTANCE]')
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}"\
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -149,7 +141,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO TENSOR NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO TENSOR NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}". \
             format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
                    edge_instance_private_ip)
@@ -166,7 +157,6 @@ if __name__ == "__main__":
     # installing and configuring TensorFlow and all dependencies
     try:
         logging.info('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
-        print('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
         params = "--hostname {0} --keyfile {1} " \
                  "--region {2} --os_user {3} " \
                  "--ip_address {4} --exploratory_name {5} --edge_ip {6}" \
@@ -184,7 +174,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -201,7 +190,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -217,7 +205,6 @@ if __name__ == "__main__":
 
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -233,7 +220,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -254,10 +240,10 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING AMI]')
+            logging.info('[CREATING AMI]')
             ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '' and notebook_config['shared_image_enabled'] == 'false':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 try:
                     os.environ['conf_additional_tags'] = '{2};project_tag:{0};endpoint_tag:{1};'.format(
                         os.environ['project_name'], os.environ['endpoint_name'], os.environ['conf_additional_tags'])
@@ -268,9 +254,9 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. It's ID is {}".format(image_id))
             else:
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 try:
                     os.environ['conf_additional_tags'] = '{};ami:shared;endpoint_tag:{};'.format(
                         os.environ['conf_additional_tags'], os.environ['endpoint_name'])
@@ -281,7 +267,7 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. It's ID is {}".format(image_id))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
@@ -300,26 +286,25 @@ if __name__ == "__main__":
         jupyter_ungit_access_url = "https://{}/{}-ungit/".format(notebook_config['edge_instance_hostname'],
                                                                  notebook_config['exploratory_name'])
         ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private DNS: {}".format(dns_name))
-        print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private DNS: {}".format(dns_name))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                              notebook_config['instance_name'])))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("AMI name: {}".format(notebook_config['notebook_image_name']))
-        print("Profile name: {}".format(notebook_config['role_profile_name']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("TensorBoard URL: {}".format(tensorboard_url))
-        print("TensorBoard log dir: /var/log/tensorboard")
-        print("Jupyter URL: {}".format(jupyter_ip_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("AMI name: {}".format(notebook_config['notebook_image_name']))
+        logging.info("Profile name: {}".format(notebook_config['role_profile_name']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("TensorBoard URL: {}".format(tensorboard_url))
+        logging.info("TensorBoard log dir: /var/log/tensorboard")
+        logging.info("Jupyter URL: {}".format(jupyter_ip_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
-        print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.format(
+        logging.info('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.format(
             notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
index 3e86490..96105fe 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
@@ -26,12 +26,12 @@ import datalab.fab
 import datalab.actions_lib
 import datalab.meta_lib
 import json
-import logging
 import os
 import sys
 import traceback
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--uuid', type=str, default='')
@@ -39,12 +39,6 @@ args = parser.parse_args()
 
 if __name__ == "__main__":
     instance_class = 'notebook'
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
     try:
         notebook_config = dict()
         try:
@@ -119,7 +113,6 @@ if __name__ == "__main__":
             notebook_config['sudo_group'] = 'wheel'
 
         logging.info('[CREATING DATALAB SSH USER]')
-        print('[CREATING DATALAB SSH USER]')
         params = "--hostname {} --keyfile {} --initial_user {} --os_user {} --sudo_group {}".format(
             instance_hostname, "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
             notebook_config['initial_user'], notebook_config['datalab_ssh_user'], notebook_config['sudo_group'])
@@ -137,7 +130,6 @@ if __name__ == "__main__":
     # configuring proxy on Notebook instance
     try:
         logging.info('[CONFIGURE PROXY ON ZEPPELIN INSTANCE]')
-        print('[CONFIGURE PROXY ON ZEPPELIN INSTANCE]')
         additional_config = {"proxy_host": edge_instance_hostname, "proxy_port": "3128"}
         params = "--hostname {} --instance_name {} --keyfile {} --additional_config '{}' --os_user {}" \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, json.dumps(additional_config),
@@ -155,7 +147,6 @@ if __name__ == "__main__":
     # updating repositories & installing python packages
     try:
         logging.info('[INSTALLING PREREQUISITES TO ZEPPELIN NOTEBOOK INSTANCE]')
-        print('[INSTALLING PREREQUISITES TO ZEPPELIN NOTEBOOK INSTANCE]')
         params = "--hostname {} --keyfile {} --user {} --region {} --edge_private_ip {}" \
             .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'], os.environ['aws_region'],
                     edge_instance_private_ip)
@@ -165,7 +156,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         datalab.fab.append_result("Failed installing apps: apt & pip.", str(err))
         datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
         sys.exit(1)
@@ -173,7 +164,6 @@ if __name__ == "__main__":
     # installing and configuring zeppelin and all dependencies
     try:
         logging.info('[CONFIGURE ZEPPELIN NOTEBOOK INSTANCE]')
-        print('[CONFIGURE ZEPPELIN NOTEBOOK INSTANCE]')
         additional_config = {"frontend_hostname": edge_instance_hostname,
                              "backend_hostname": datalab.meta_lib.get_instance_hostname(notebook_config['tag_name'],
                                                                                         notebook_config[
@@ -208,7 +198,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[INSTALLING USERs KEY]')
         logging.info('[INSTALLING USERs KEY]')
         additional_config = {"user_keyname": notebook_config['user_keyname'],
                              "user_keydir": os.environ['conf_key_dir']}
@@ -225,7 +214,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP USER GIT CREDENTIALS]')
         logging.info('[SETUP USER GIT CREDENTIALS]')
         params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
             .format(notebook_config['datalab_ssh_user'], instance_hostname, keyfile_name)
@@ -241,7 +229,6 @@ if __name__ == "__main__":
     
     try:
         logging.info('[POST CONFIGURING PROCESS]')
-        print('[POST CONFIGURING PROCESS')
         if notebook_config['notebook_image_name'] not in [notebook_config['expected_image_name'], 'None', '']:
             params = "--hostname {} --keyfile {} --os_user {} --nb_tag_name {} --nb_tag_value {}" \
                 .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
@@ -257,7 +244,6 @@ if __name__ == "__main__":
         sys.exit(1)
 
     try:
-        print('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         logging.info('[SETUP EDGE REVERSE PROXY TEMPLATE]')
         additional_info = {
             'instance_hostname': instance_hostname,
@@ -278,10 +264,10 @@ if __name__ == "__main__":
 
     if notebook_config['image_enabled'] == 'true':
         try:
-            print('[CREATING AMI]')
+            logging.info('[CREATING AMI]')
             ami_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['expected_image_name'])
             if ami_id == '' and notebook_config['shared_image_enabled'] == 'false':
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 try:
                     os.environ['conf_additional_tags'] = '{2};project_tag:{0};endpoint_tag:{1};'.format(
                         os.environ['project_name'], os.environ['endpoint_name'], os.environ['conf_additional_tags'])
@@ -292,9 +278,9 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. It's ID is {}".format(image_id))
             else:
-                print("Looks like it's first time we configure notebook server. Creating image.")
+                logging.info("Looks like it's first time we configure notebook server. Creating image.")
                 try:
                     os.environ['conf_additional_tags'] = '{};ami:shared;endpoint_tag:{};'.format(
                         os.environ['conf_additional_tags'], os.environ['endpoint_name'])
@@ -305,7 +291,7 @@ if __name__ == "__main__":
                     tag_name=notebook_config['tag_name'], instance_name=notebook_config['instance_name'],
                     image_name=notebook_config['expected_image_name'])
                 if image_id != '':
-                    print("Image was successfully created. It's ID is {}".format(image_id))
+                    logging.info("Image was successfully created. It's ID is {}".format(image_id))
         except Exception as err:
             datalab.fab.append_result("Failed creating image.", str(err))
             datalab.actions_lib.remove_ec2(notebook_config['tag_name'], notebook_config['instance_name'])
@@ -322,25 +308,24 @@ if __name__ == "__main__":
         zeppelin_ungit_access_url = "https://{}/{}-ungit/".format(notebook_config['edge_instance_hostname'],
                                                                   notebook_config['exploratory_name'])
         ungit_ip_url = "http://" + ip_address + ":8085/{}-ungit/".format(notebook_config['exploratory_name'])
-        print('[SUMMARY]')
         logging.info('[SUMMARY]')
-        print("Instance name: {}".format(notebook_config['instance_name']))
-        print("Private DNS: {}".format(dns_name))
-        print("Private IP: {}".format(ip_address))
-        print("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
+        logging.info("Instance name: {}".format(notebook_config['instance_name']))
+        logging.info("Private DNS: {}".format(dns_name))
+        logging.info("Private IP: {}".format(ip_address))
+        logging.info("Instance ID: {}".format(datalab.meta_lib.get_instance_by_name(notebook_config['tag_name'],
                                                                              notebook_config['instance_name'])))
-        print("Instance type: {}".format(notebook_config['instance_type']))
-        print("Key name: {}".format(notebook_config['key_name']))
-        print("User key name: {}".format(notebook_config['user_keyname']))
-        print("AMI name: {}".format(notebook_config['notebook_image_name']))
-        print("Profile name: {}".format(notebook_config['role_profile_name']))
-        print("SG name: {}".format(notebook_config['security_group_name']))
-        print("Zeppelin URL: {}".format(zeppelin_ip_url))
-        print("Zeppelin URL: {}".format(zeppelin_dns_url))
-        print("Ungit URL: {}".format(ungit_ip_url))
-        print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
+        logging.info("Instance type: {}".format(notebook_config['instance_type']))
+        logging.info("Key name: {}".format(notebook_config['key_name']))
+        logging.info("User key name: {}".format(notebook_config['user_keyname']))
+        logging.info("AMI name: {}".format(notebook_config['notebook_image_name']))
+        logging.info("Profile name: {}".format(notebook_config['role_profile_name']))
+        logging.info("SG name: {}".format(notebook_config['security_group_name']))
+        logging.info("Zeppelin URL: {}".format(zeppelin_ip_url))
+        logging.info("Zeppelin URL: {}".format(zeppelin_dns_url))
+        logging.info("Ungit URL: {}".format(ungit_ip_url))
+        logging.info('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
-        print('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
+        logging.info('SSH access (from Edge node, via FQDN): ssh -i {0}.pem {1}@{2}'.
               format(notebook_config['key_name'], notebook_config['datalab_ssh_user'], dns_name))
 
         with open("/root/result.json", 'w') as result:

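Every script above now does from datalab.logger import logging, but the
module itself is not part of this diff. A minimal sketch of what
datalab/logger.py could look like, assuming it reproduces the log layout of
the removed per-script basicConfig blocks
(/logs/<conf_resource>/<conf_resource>_<project_name>_<request_id>.log);
this is an illustration, not the module shipped with the commit:

    # datalab/logger.py -- hypothetical sketch. Configures the root logger
    # once with the same format the removed basicConfig blocks used, then
    # re-exports the stdlib logging module so callers can simply write
    # "from datalab.logger import logging".
    import logging
    import os

    resource = os.environ.get('conf_resource', 'unknown')
    log_dir = "/logs/{}".format(resource)
    log_file = "{}/{}_{}_{}.log".format(log_dir, resource,
                                        os.environ.get('project_name', 'unknown'),
                                        os.environ.get('request_id', 'unknown'))
    os.makedirs(log_dir, exist_ok=True)  # assumes /logs is writable

    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                        level=logging.DEBUG,
                        handlers=[logging.FileHandler(log_file),
                                  logging.StreamHandler()])

The StreamHandler is an assumption as well: it keeps messages on stdout, so
the converted logging.info calls stay visible in console output the way the
removed print statements were.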