You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@datalab.apache.org by lf...@apache.org on 2021/10/11 15:00:19 UTC

[incubator-datalab] 01/04: [DATALAB-2409]: replaced print with logging in all src/ .py scripts except in fabfiles and general/

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 1638c1d715a2c8200afcd9b2660184965639f6aa
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Oct 11 15:42:43 2021 +0300

    [DATALAB-2409]: replaced print with logging in all src/ .py scripts except in fabfiles and general/
---
 .../src/base/scripts/configure_keycloak.py         |  6 +-
 .../src/base/scripts/create_ssh_user.py            |  7 +-
 .../src/base/scripts/install_user_key.py           | 17 ++--
 .../src/dataengine/scripts/configure_dataengine.py | 51 ++++++------
 .../scripts/configure_deep_learning_node.py        | 57 ++++++-------
 .../src/edge/scripts/configure_http_proxy.py       |  5 +-
 .../edge/scripts/configure_nginx_reverse_proxy.py  | 16 +---
 .../src/edge/scripts/reupload_ssh_key.py           | 12 +--
 .../src/general/lib/aws/actions_lib.py             |  2 +-
 .../src/general/lib/aws/meta_lib.py                |  2 +-
 .../src/general/lib/azure/actions_lib.py           |  2 +-
 .../src/general/lib/azure/meta_lib.py              |  2 +-
 .../src/general/lib/gcp/actions_lib.py             |  2 +-
 .../src/general/lib/gcp/meta_lib.py                |  2 +-
 .../src/general/lib/os/debian/ssn_lib.py           |  1 +
 .../src/jupyter/scripts/configure_jupyter_node.py  | 51 ++++++------
 .../scripts/configure_jupyterlab_node.py           | 17 ++--
 .../src/project/scripts/configure_http_proxy.py    |  5 +-
 .../src/project/scripts/configure_nftables.py      |  5 +-
 .../scripts/configure_nginx_reverse_proxy.py       | 18 ++--
 .../src/project/scripts/reupload_ssh_key.py        | 12 +--
 .../src/rstudio/scripts/configure_rstudio_node.py  | 31 +++----
 .../src/ssn/scripts/backup.py                      | 37 +++++----
 .../src/ssn/scripts/configure_billing.py           | 11 +--
 .../src/ssn/scripts/configure_conf_file.py         |  9 +-
 .../src/ssn/scripts/configure_docker.py            | 31 +++----
 .../src/ssn/scripts/configure_gitlab.py            | 23 ++---
 .../src/ssn/scripts/configure_mongo.py             |  7 +-
 .../src/ssn/scripts/configure_ssn_node.py          | 49 +++++------
 .../src/ssn/scripts/configure_ui.py                | 26 +++---
 .../src/ssn/scripts/docker_build.py                |  3 +-
 .../src/ssn/scripts/gitlab_deploy.py               | 41 ++++-----
 .../src/ssn/scripts/resource_status.py             |  5 +-
 .../src/ssn/scripts/restore.py                     | 97 +++++++++++-----------
 .../src/ssn/scripts/upload_response_file.py        |  8 +-
 .../superset/scripts/configure_superset_node.py    | 15 ++--
 .../scripts/configure_tensor-rstudio_node.py       | 37 +++++----
 .../src/tensor/scripts/configure_tensor_node.py    | 43 +++++-----
 .../zeppelin/scripts/configure_zeppelin_node.py    | 51 ++++++------
 39 files changed, 404 insertions(+), 412 deletions(-)

diff --git a/infrastructure-provisioning/src/base/scripts/configure_keycloak.py b/infrastructure-provisioning/src/base/scripts/configure_keycloak.py
index 614b812..ecba5d6 100644
--- a/infrastructure-provisioning/src/base/scripts/configure_keycloak.py
+++ b/infrastructure-provisioning/src/base/scripts/configure_keycloak.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import argparse
-import logging
+from datalab.logger import logging
 import requests
 import uuid
 from datalab.actions_lib import *
@@ -47,7 +47,6 @@ args = parser.parse_args()
 ##############
 if __name__ == "__main__":
     try:
-        print('[CONFIGURE KEYCLOAK]')
         logging.info('[CONFIGURE KEYCLOAK]')
         keycloak_auth_server_url = '{}/realms/master/protocol/openid-connect/token'.format(
             args.keycloak_auth_server_url)
@@ -70,7 +69,6 @@ if __name__ == "__main__":
         keycloak_client_id = str(uuid.uuid4())
         if args.hostname == '':
             keycloak_redirectUris = 'https://{0}/*,http://{0}/*'.format(args.instance_public_ip).lower().split(',')
-            print(keycloak_redirectUris)
         else:
             keycloak_redirectUris = 'https://{0}/*,http://{0}/*,https://{1}/*,http://{1}/*'.format(
                 args.instance_public_ip, args.hostname).lower().split(',')
@@ -98,5 +96,5 @@ if __name__ == "__main__":
             append_result("Failed to configure keycloak.")
             raise Exception
     except Exception as err:
-        print('Error: {0}'.format(err))
+        logging.error('Error: {0}'.format(err))
         append_result("Failed to configure keycloak.", str(err))
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/base/scripts/create_ssh_user.py b/infrastructure-provisioning/src/base/scripts/create_ssh_user.py
index 183295c..0b7c6c7 100644
--- a/infrastructure-provisioning/src/base/scripts/create_ssh_user.py
+++ b/infrastructure-provisioning/src/base/scripts/create_ssh_user.py
@@ -30,6 +30,7 @@ import sys
 import time
 import traceback
 from patchwork.files import exists
+from datalab.logger import logging
 from patchwork import files
 
 parser = argparse.ArgumentParser()
@@ -55,13 +56,13 @@ def ensure_ssh_user(initial_user, os_user, sudo_group):
         conn.sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.initial_user, args.keyfile)
-    print("Creating ssh user: {}".format(args.os_user))
+    logging.info("Creating ssh user: {}".format(args.os_user))
     try:
         ensure_ssh_user(args.initial_user, args.os_user, args.sudo_group)
     except Exception as err:
-        print('Failed to create ssh user', str(err))
+        logging.error('Failed to create ssh user', str(err))
         sys.exit(1)
     conn.close()
diff --git a/infrastructure-provisioning/src/base/scripts/install_user_key.py b/infrastructure-provisioning/src/base/scripts/install_user_key.py
index 2e68abd..ef29684 100644
--- a/infrastructure-provisioning/src/base/scripts/install_user_key.py
+++ b/infrastructure-provisioning/src/base/scripts/install_user_key.py
@@ -26,6 +26,7 @@ import json
 import sys
 import subprocess
 from datalab.fab import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -45,38 +46,38 @@ def copy_key(config):
         user_key = '{}{}.pub'.format(
             config.get('user_keydir'),
             config.get('user_keyname'))
-        print(user_key)
+        logging.info(user_key)
         if 'user_key' not in config or config.get('user_key') == None:
             key = open('{0}'.format(user_key)).read()
         else:
             key = config.get('user_key')
         conn.sudo('echo "{0}" >> /home/{1}/.ssh/authorized_keys'.format(key, args.user))
     except:
-        print('No user key')
+        logging.error('No user key')
 
 ##############
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
         deeper_config = json.loads(args.additional_config)
     except:
-        print('Fail connection')
+        logging.error('Fail connection')
         sys.exit(2)
     try:
-        print("Ensuring safest ssh ciphers")
+        logging.info("Ensuring safest ssh ciphers")
         ensure_ciphers()
     except:
-        print('Faild to install safest ssh ciphers')
+        logging.error('Failed to install safest ssh ciphers')
 
-    print("Installing users key...")
+    logging.info("Installing users key...")
     try:
         copy_key(deeper_config)
         #conn.close()
     except:
-        print("Users keyfile {0} could not be found at {1}/{0}".format(args.keyfile, deeper_config['user_keydir']))
+        logging.error("Users keyfile {0} could not be found at {1}/{0}".format(args.keyfile, deeper_config['user_keydir']))
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
index 4de0ca3..1c89423 100644
--- a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
+++ b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -116,12 +117,12 @@ def start_spark(os_user, master_ip, node):
 
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
@@ -129,77 +130,77 @@ if __name__ == "__main__":
         sys.exit(1)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
     if os.environ['application'] in ('jupyter', 'zeppelin'):
-        print("Install Scala")
+        logging.info("Install Scala")
         ensure_scala(scala_link, args.scala_version, args.os_user)
     if (os.environ['application'] in ('jupyter', 'zeppelin')
         and os.environ['notebook_r_enabled'] == 'true') \
             or os.environ['application'] in ('rstudio', 'tensor-rstudio'):
-        print("Installing R")
+        logging.info("Installing R")
         ensure_r(args.os_user, r_libs)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
     if os.environ['application'] == 'zeppelin':
-        print("Install python3 specific version")
+        logging.info("Install python3 specific version")
         ensure_python3_specific_version(python3_version, args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install Spark")
+    logging.info("Install Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path)
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir, '')
 
     # INSTALL TENSORFLOW AND OTHER DEEP LEARNING LIBRARIES
     if os.environ['application'] in ('tensor', 'tensor-rstudio', 'deeplearning'):
-        print("Installing TensorFlow")
+        logging.info("Installing TensorFlow")
         install_tensor(args.os_user, cuda_version, cuda_file_name,
                        cudnn_version, cudnn_file_name, tensorflow_version,
                        templates_dir, nvidia_version)
-        print("Install Theano")
+        logging.info("Install Theano")
         install_theano(args.os_user, theano_version)
-        print("Installing Keras")
+        logging.info("Installing Keras")
         install_keras(args.os_user, keras_version)
 
     # INSTALL DEEP LEARNING FRAMEWORKS
     if os.environ['application'] == 'deeplearning':
-        print("Installing Caffe2")
+        logging.info("Installing Caffe2")
         install_caffe2(args.os_user, caffe2_version, cmake_version)
-        #print("Installing Torch")
+        #logging.info("Installing Torch")
         #install_torch(args.os_user)
-        print("Install CNTK Python library")
+        logging.info("Install CNTK Python library")
         install_cntk(args.os_user, cntk_version)
-        print("Installing MXNET")
+        logging.info("Installing MXNET")
         install_mxnet(args.os_user, mxnet_version)
 
     # START SPARK CLUSTER
     if args.node_type == 'master':
-        print("Starting Spark master")
+        logging.info("Starting Spark master")
         start_spark(args.os_user, args.hostname, node='master')
     elif args.node_type == 'slave':
-        print("Starting Spark slave")
+        logging.info("Starting Spark slave")
         start_spark(args.os_user, args.master_ip, node='slave')
 
     # INSTALL OPTIONAL PACKAGES
     if os.environ['application'] in ('jupyter', 'zeppelin', 'tensor', 'deeplearning'):
-        print("Install additional Python packages")
+        logging.info("Install additional Python packages")
         ensure_additional_python_libs(args.os_user)
-        print("Install matplotlib")
+        logging.info("Install matplotlib")
         ensure_matplot(args.os_user)
     if os.environ['application'] == 'jupyter':
-        print("Install SBT")
+        logging.info("Install SBT")
         ensure_sbt(args.os_user)
-        print("Install Breeze")
+        logging.info("Install Breeze")
         add_breeze_library_local(args.os_user)
     if os.environ['application'] == 'zeppelin' and os.environ['notebook_r_enabled'] == 'true':
-        print("Install additional R packages")
+        logging.info("Install additional R packages")
         install_r_packages(args.os_user)
 
     # INSTALL LIVY
diff --git a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
index 2c7a88d..f23ee12 100644
--- a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
+++ b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
@@ -30,6 +30,7 @@ from datalab.notebook_lib import *
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -107,22 +108,22 @@ def configure_jupyterlab_at_gcp_image(os_user, exploratory_name):
 
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
             conn.sudo('touch /home/' + args.os_user + '/.ensure_dir/deep_learning')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     if os.environ['conf_cloud_provider'] == 'gcp' and os.environ['conf_deeplearning_cloud_ami'] == 'true':
-        print('Additional disk premounted by google image')
-        print('Installing nvidia drivers')
+        logging.info('Additional disk premounted by google image')
+        logging.info('Installing nvidia drivers')
         try:
             conn.sudo('/opt/deeplearning/install-driver.sh')
         except:
@@ -133,76 +134,76 @@ if __name__ == "__main__":
 
     if os.environ['conf_deeplearning_cloud_ami'] == 'false':
         # INSTALL LANGUAGES
-        print("Install Java")
+        logging.info("Install Java")
         ensure_jre_jdk(args.os_user)
-        print("Install Python 3 modules")
+        logging.info("Install Python 3 modules")
         ensure_python3_libraries(args.os_user)
 
         # INSTALL TENSORFLOW AND OTHER DEEP LEARNING LIBRARIES AND FRAMEWORKS
-        print("Install TensorFlow")
+        logging.info("Install TensorFlow")
         install_tensor(args.os_user, cuda_version, cuda_file_name,
                        cudnn_version, cudnn_file_name, tensorflow_version,
                        templates_dir, nvidia_version)
-        print("Install Theano")
+        logging.info("Install Theano")
         install_theano(args.os_user, theano_version)
-        print("Installing Keras")
+        logging.info("Installing Keras")
         install_keras(args.os_user, keras_version)
-        print("Installing Caffe2")
+        logging.info("Installing Caffe2")
         install_caffe2(args.os_user, caffe2_version, cmake_version)
-        #print("Installing Torch")
+        #logging.info("Installing Torch")
         #install_torch(args.os_user)
-        print("Install CNTK Python library")
+        logging.info("Install CNTK Python library")
         install_cntk(args.os_user, cntk_version)
-        print("Installing MXNET")
+        logging.info("Installing MXNET")
         install_mxnet(args.os_user, mxnet_version)
 
         # INSTALL JUPYTER NOTEBOOK
-        print("Install Jupyter")
+        logging.info("Install Jupyter")
         configure_jupyter(args.os_user, jupyter_conf_file, templates_dir, args.jupyter_version, args.exploratory_name)
 
         # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-        print("Install local Spark")
+        logging.info("Install local Spark")
         ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path)
-        print("Install storage jars")
+        logging.info("Install storage jars")
         ensure_local_jars(args.os_user, jars_dir)
-        print("Configure local Spark")
+        logging.info("Configure local Spark")
         configure_local_spark(jars_dir, templates_dir)
 
         # INSTALL JUPYTER KERNELS
-        print("Install pyspark local kernel for Jupyter")
+        logging.info("Install pyspark local kernel for Jupyter")
         ensure_pyspark_local_kernel(args.os_user, pyspark_local_path_dir, templates_dir, spark_version)
-        print("Install py3spark local kernel for Jupyter")
+        logging.info("Install py3spark local kernel for Jupyter")
         ensure_py3spark_local_kernel(args.os_user, py3spark_local_path_dir, templates_dir, spark_version)
-        #print("Installing ITorch kernel for Jupyter")
+        #logging.info("Installing ITorch kernel for Jupyter")
         #install_itorch(args.os_user)
 
         # INSTALL OPTIONAL PACKAGES
-        print("Installing additional Python packages")
+        logging.info("Installing additional Python packages")
         ensure_additional_python_libs(args.os_user)
-        print("Install Matplotlib")
+        logging.info("Install Matplotlib")
         ensure_matplot(args.os_user)
     elif os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['conf_cloud_provider'] != 'gcp':
         # CONFIGURE JUPYTER NOTEBOOK
-        print("Configure Jupyter")
+        logging.info("Configure Jupyter")
         configure_jupyter(args.os_user, jupyter_conf_file, templates_dir, args.jupyter_version, args.exploratory_name)
     else:
         configure_jupyterlab_at_gcp_image(args.os_user, args.exploratory_name)
 
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install Ungit")
+    logging.info("Install Ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py b/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
index 0e9034e..a7e8e53 100644
--- a/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
+++ b/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
@@ -26,6 +26,7 @@ import json
 import sys
 from fabric import *
 from datalab.fab import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -38,7 +39,7 @@ args = parser.parse_args()
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
@@ -46,6 +47,6 @@ if __name__ == "__main__":
     except:
         sys.exit(2)
 
-    print("Installing proxy for notebooks.")
+    logging.info("Installing proxy for notebooks.")
     datalab.fab.configure_http_proxy_server(deeper_config)
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py b/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
index 6513db8..58d6a49 100644
--- a/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
+++ b/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import argparse
-import logging
+from datalab.logger import logging
 import os
 import sys
 from datalab.edge_lib import install_nginx_ldap
@@ -36,20 +36,12 @@ parser.add_argument('--user', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'],
-                                               os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
     except Exception as err:
-        print("Failed establish connection. Excpeption: " + str(err))
+        logging.error("Failed to establish connection. Exception: " + str(err))
         sys.exit(1)
 
     try:
@@ -58,6 +50,6 @@ if __name__ == "__main__":
                            os.environ['ldap_ou'], os.environ['ldap_service_password'],
                            os.environ['ldap_service_username'], os.environ['aws_iam_user'])
     except Exception as err:
-        print("Failed install nginx reverse proxy: " + str(err))
+        logging.error("Failed to install nginx reverse proxy: " + str(err))
         sys.exit(1)
     conn.close()
diff --git a/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py b/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
index 4f8483b..0199f88 100644
--- a/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
+++ b/infrastructure-provisioning/src/edge/scripts/reupload_ssh_key.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -32,18 +32,10 @@ from datalab.meta_lib import *
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         if os.environ['conf_cloud_provider'] == 'aws':
             create_aws_config_files()
         logging.info('[REUPLOADING USER SSH KEY]')
-        print('[REUPLOADING USER SSH KEY]')
         reupload_config = dict()
         reupload_config['os_user'] = os.environ['conf_os_user']
         reupload_config['edge_user_name'] = os.environ['edge_user_name']
@@ -51,7 +43,7 @@ if __name__ == "__main__":
         reupload_config['resource_id'] = os.environ['resource_id']
         reupload_config['additional_config'] = {"user_keyname": reupload_config['edge_user_name'],
                                                 "user_keydir": os.environ['conf_key_dir']}
-        print(reupload_config)
+        logging.info(reupload_config)
         try:
             params = "--conf_resource {} --instance_id {} --os_user '{}'" \
                      " --keyfile '{}' --additional_config '{}'".format(
diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index 567c3b6..4eddcbd 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -24,7 +24,7 @@ import backoff
 import boto3
 import botocore
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import time
diff --git a/infrastructure-provisioning/src/general/lib/aws/meta_lib.py b/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
index 858c58d..4d8c397 100644
--- a/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/meta_lib.py
@@ -23,7 +23,7 @@ import datalab.actions_lib
 import backoff
 import boto3
 import json
-import logging
+from datalab.logger import logging
 import sys
 import time
 import traceback
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index 756dd27..35f8072 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -26,7 +26,7 @@ import datalab.common_lib
 import datalab.fab
 import datalab.meta_lib
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import time
diff --git a/infrastructure-provisioning/src/general/lib/azure/meta_lib.py b/infrastructure-provisioning/src/general/lib/azure/meta_lib.py
index 510e875..821d8a9 100644
--- a/infrastructure-provisioning/src/general/lib/azure/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/meta_lib.py
@@ -31,7 +31,7 @@ from azure.datalake.store import core, lib
 from azure.graphrbac import GraphRbacManagementClient
 from azure.common.credentials import ServicePrincipalCredentials
 import azure.common.exceptions as AzureExceptions
-import logging
+from datalab.logger import logging
 import traceback
 import sys
 import os
diff --git a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
index fa1d891..df608ea 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/actions_lib.py
@@ -26,7 +26,7 @@ import datalab.fab
 import datalab.meta_lib
 import google.auth
 import json
-import logging
+from datalab.logger import logging
 import os
 import random
 import sys
diff --git a/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py b/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
index be5d17b..1610729 100644
--- a/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
+++ b/infrastructure-provisioning/src/general/lib/gcp/meta_lib.py
@@ -21,7 +21,7 @@
 
 import backoff
 import google.auth
-import logging
+from datalab.logger import logging
 import os
 import re
 import sys
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
index 5439abc..81af9b1 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
@@ -29,6 +29,7 @@ import traceback
 from datalab.common_lib import manage_pkg
 from datalab.fab import *
 from datalab.meta_lib import *
+from datalab.logger import logging
 from fabric import *
 import subprocess
 
diff --git a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
index 3369ab2..6975be3 100644
--- a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
+++ b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -71,87 +72,87 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
-    print("Install Scala")
+    logging.info("Install Scala")
     ensure_scala(scala_link, args.scala_version, args.os_user)
     if os.environ['notebook_r_enabled'] == 'true':
-        print("Installing R")
+        logging.info("Installing R")
         ensure_r(args.os_user, r_libs)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
 
     # INSTALL JUPYTER NOTEBOOK
-    print("Install Jupyter")
+    logging.info("Install Jupyter")
     configure_jupyter(args.os_user, jupyter_conf_file, templates_dir, jupyter_version, args.exploratory_name)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
+    logging.info("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path)
     local_spark_scala_version = conn.run(
         'export PATH=$PATH:' + local_spark_path + 'bin/; spark-submit --version 2>&1 | grep -o -P "Scala version \K.{0,7}"').stdout.replace(
         '\n', '')
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir)
 
     # INSTALL JUPYTER KERNELS
-    #print("Install pyspark local kernel for Jupyter")
+    #logging.info("Install pyspark local kernel for Jupyter")
     #ensure_pyspark_local_kernel(args.os_user, pyspark_local_path_dir, templates_dir, spark_version)
-    print("Install py3spark local kernel for Jupyter")
+    logging.info("Install py3spark local kernel for Jupyter")
     ensure_py3spark_local_kernel(args.os_user, py3spark_local_path_dir, templates_dir, spark_version, python_venv_path, python_venv_version)
-    print("Install Toree-Scala kernel for Jupyter")
+    logging.info("Install Toree-Scala kernel for Jupyter")
     ensure_toree_local_kernel(args.os_user, toree_link, scala_kernel_path, files_dir, local_spark_scala_version, spark_version)
     if os.environ['notebook_r_enabled'] == 'true':
-        print("Install R kernel for Jupyter")
+        logging.info("Install R kernel for Jupyter")
         ensure_r_local_kernel(spark_version, args.os_user, templates_dir, r_kernels_dir)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install ungit")
+    logging.info("Install ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     # INSTALL OPTIONAL PACKAGES
-    print("Installing additional Python packages")
+    logging.info("Installing additional Python packages")
     ensure_additional_python_libs(args.os_user)
-    print("Install Matplotlib")
+    logging.info("Install Matplotlib")
     ensure_matplot(args.os_user)
-    print("Install SBT")
+    logging.info("Install SBT")
     ensure_sbt(args.os_user)
-    print("Install Breeze")
+    logging.info("Install Breeze")
     add_breeze_library_local(args.os_user)
 
     #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
-    print("Removing unexisting kernels")
+    logging.info("Removing unexisting kernels")
     remove_unexisting_kernel(args.os_user)
 
     conn.close()
diff --git a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
index e8b5862..3a3bd7f 100644
--- a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
+++ b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -78,38 +79,38 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL DOCKER
-    print ("Install Docker")
+    logging.info("Install Docker")
     configure_docker(args.os_user)
 
     # CONFIGURE JUPYTER FILES
-    print("Configure jupyter files")
+    logging.info("Configure jupyter files")
     ensure_jupyterlab_files(args.os_user, jupyterlab_dir, jupyterlab_image, jupyter_conf_file, jupyterlab_conf_file, args.exploratory_name, args.edge_ip)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install ungit")
+    logging.info("Install ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py b/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py
index 4af93ff..ecddaf0 100644
--- a/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py
+++ b/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py
@@ -26,6 +26,7 @@ import json
 import sys
 from fabric import *
 from datalab.fab import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -38,7 +39,7 @@ args = parser.parse_args()
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
@@ -46,7 +47,7 @@ if __name__ == "__main__":
     except:
         sys.exit(2)
 
-    print("Installing proxy for notebooks.")
+    logging.info("Installing proxy for notebooks.")
     datalab.fab.configure_http_proxy_server(deeper_config)
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/project/scripts/configure_nftables.py b/infrastructure-provisioning/src/project/scripts/configure_nftables.py
index 8fe14cd..b00e228 100644
--- a/infrastructure-provisioning/src/project/scripts/configure_nftables.py
+++ b/infrastructure-provisioning/src/project/scripts/configure_nftables.py
@@ -26,6 +26,7 @@ import json
 import sys
 from fabric import *
 from datalab.fab import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -38,7 +39,7 @@ args = parser.parse_args()
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
@@ -46,6 +47,6 @@ if __name__ == "__main__":
     except:
         sys.exit(2)
 
-    print("Configuring nftables on edge node.")
+    logging.info("Configuring nftables on edge node.")
     datalab.fab.configure_nftables(deeper_config)
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py b/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py
index 8baa5ce..472b252 100644
--- a/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py
+++ b/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import argparse
-import logging
+from datalab.logger import logging
 import os
 import sys
 from datalab.common_lib import ensure_step
@@ -40,26 +40,18 @@ parser.add_argument('--step_cert_sans', type=str, default='')
 args = parser.parse_args()
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'],
-                                               os.environ['project_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
     except Exception as err:
-        print("Failed establish connection. Excpeption: " + str(err))
+        logging.error("Failed to establish connection. Exception: " + str(err))
         sys.exit(1)
     if os.environ['conf_stepcerts_enabled'] == 'true':
         try:
             ensure_step(args.user)
         except Exception as err:
-            print("Failed install step: " + str(err))
+            logging.error("Failed install step: " + str(err))
             sys.exit(1)
 
     try:
@@ -68,7 +60,7 @@ if __name__ == "__main__":
                           args.keycloak_client_id, args.keycloak_client_secret, args.user, args.hostname,
                           args.step_cert_sans)
     except Exception as err:
-        print("Failed install nginx reverse proxy: " + str(err))
+        logging.error("Failed install nginx reverse proxy: " + str(err))
         sys.exit(1)
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py b/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
index 4f8483b..0199f88 100644
--- a/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
+++ b/infrastructure-provisioning/src/project/scripts/reupload_ssh_key.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -32,18 +32,10 @@ from datalab.meta_lib import *
 from fabric import *
 
 if __name__ == "__main__":
-    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
-                                               os.environ['request_id'])
-    local_log_filepath = "/logs/edge/" + local_log_filename
-    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
-                        level=logging.DEBUG,
-                        filename=local_log_filepath)
-
     try:
         if os.environ['conf_cloud_provider'] == 'aws':
             create_aws_config_files()
         logging.info('[REUPLOADING USER SSH KEY]')
-        print('[REUPLOADING USER SSH KEY]')
         reupload_config = dict()
         reupload_config['os_user'] = os.environ['conf_os_user']
         reupload_config['edge_user_name'] = os.environ['edge_user_name']
@@ -51,7 +43,7 @@ if __name__ == "__main__":
         reupload_config['resource_id'] = os.environ['resource_id']
         reupload_config['additional_config'] = {"user_keyname": reupload_config['edge_user_name'],
                                                 "user_keydir": os.environ['conf_key_dir']}
-        print(reupload_config)
+        logging.info(reupload_config)
         try:
             params = "--conf_resource {} --instance_id {} --os_user '{}'" \
                      " --keyfile '{}' --additional_config '{}'".format(
diff --git a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
index 397a22b..5ce8645 100644
--- a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
+++ b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
@@ -28,6 +28,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -67,58 +68,58 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
-    print("Install R")
+    logging.info("Install R")
     ensure_r(args.os_user, r_libs)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
 
     # INSTALL RSTUDIO
-    print("Install RStudio")
+    logging.info("Install RStudio")
     install_rstudio(args.os_user, local_spark_path, args.rstudio_pass, args.rstudio_version, python_venv_version)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
+    logging.info("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path)
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install Ungit")
+    logging.info("Install Ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address, True)
 
     #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
 
     conn.close()
diff --git a/infrastructure-provisioning/src/ssn/scripts/backup.py b/infrastructure-provisioning/src/ssn/scripts/backup.py
index dc6d91c..971551a 100644
--- a/infrastructure-provisioning/src/ssn/scripts/backup.py
+++ b/infrastructure-provisioning/src/ssn/scripts/backup.py
@@ -28,6 +28,7 @@ import sys
 import yaml
 from fabric import *
 from time import gmtime, strftime
+from datalab.logger import logging
 import subprocess
 
 parser = argparse.ArgumentParser(description="Backup script for DataLab configs, keys, certs, jars, database & logs")
@@ -72,9 +73,9 @@ def backup_prepare():
 
 def backup_configs():
     try:
-        print('Backup configs: {}'.format(args.configs))
+        logging.info('Backup configs: {}'.format(args.configs))
         if args.configs == 'skip':
-            print('Skipped config backup.')
+            logging.info('Skipped config backup.')
         elif args.configs == 'all':
             subprocess.run("find {0}{2} -name '*yml' -exec cp {3} {1}{2} \;".format(args.datalab_path, temp_folder, conf_folder,
                                                                            "{}"), shell=True, check=True)
@@ -88,9 +89,9 @@ def backup_configs():
 
 def backup_keys():
     try:
-        print('Backup keys: {}'.format(args.keys))
+        logging.info('Backup keys: {}'.format(args.keys))
         if args.keys == 'skip':
-            print('Skipped keys backup.')
+            logging.info('Skipped keys backup.')
         elif args.keys == 'all':
             subprocess.run('cp {0}* {1}keys'.format(keys_folder, temp_folder), shell=True, check=True)
         else:
@@ -103,9 +104,9 @@ def backup_keys():
 
 def backup_certs():
     try:
-        print('Backup certs: {}'.format(args.certs))
+        logging.info('Backup certs: {}'.format(args.certs))
         if args.certs == 'skip':
-            print('Skipped certs backup.')
+            logging.info('Skipped certs backup.')
         elif args.certs == 'all':
             for cert in all_certs:
                 subprocess.run('sudo cp {0}{1} {2}certs'.format(certs_folder, cert, temp_folder), shell=True, check=True)
@@ -121,9 +122,9 @@ def backup_certs():
 
 def backup_jars():
     try:
-        print('Backup jars: {}'.format(args.jars))
+        logging.info('Backup jars: {}'.format(args.jars))
         if args.jars == 'skip':
-            print('Skipped jars backup.')
+            logging.info('Skipped jars backup.')
         elif args.jars == 'all':
             for root, dirs, files in os.walk('{0}{1}'.format(args.datalab_path, jars_folder)):
                 for service in dirs:
@@ -138,7 +139,7 @@ def backup_jars():
 
 def backup_database():
     try:
-        print('Backup db: {}'.format(args.db))
+        logging.info('Backup db: {}'.format(args.db))
         if args.db:
             ssn_conf = open('{0}{1}ssn.yml'.format(args.datalab_path, conf_folder)).read()
             data = yaml.load('mongo{}'.format(ssn_conf.split('mongo')[-1]))
@@ -153,30 +154,30 @@ def backup_database():
 
 def backup_logs():
     try:
-        print('Backup logs: {}'.format(args.logs))
+        logging.info('Backup logs: {}'.format(args.logs))
         if args.logs:
-            print('Backup DataLab logs')
+            logging.info('Backup DataLab logs')
             subprocess.run('cp -R {0}* {1}logs'.format(datalab_logs_folder, temp_folder), shell=True, check=True)
-            print('Backup docker logs')
+            logging.info('Backup docker logs')
             subprocess.run("sudo find {0} -name '*log' -exec cp {2} {1}logs/docker \;".format(docker_logs_folder, temp_folder,
                                                                                      "{}"), shell=True, check=True)
             subprocess.run('sudo chown -R {0}:{0} {1}logs/docker'.format(os_user, temp_folder), shell=True, check=True)
     except:
         append_result(error='Backup logs failed.')
-        print('Backup logs failed.')
+        logging.error('Backup logs failed.')
         sys.exit(1)
 
 
 def backup_finalize():
     try:
-        print('Compressing all files to archive...')
+        logging.info('Compressing all files to archive...')
         subprocess.run('cd {0} && tar -zcf {1} .'.format(temp_folder, dest_file), shell=True, check=True)
     except Exception as err:
         append_result(error='Compressing backup failed. {}'.format(str(err)))
         sys.exit(1)
 
     try:
-        print('Clear temp folder...')
+        logging.info('Clear temp folder...')
         if temp_folder != '/':
             subprocess.run('rm -rf {}'.format(temp_folder), shell=True, check=True)
     except Exception as err:
@@ -189,12 +190,12 @@ def append_result(status='failed', error='', backup_file=''):
         res = {"status": status,
                "request_id": args.request_id}
         if status == 'failed':
-            print(error)
+            logging.error(error)
             res['error_message'] = error
         elif status == 'created':
-            print('Successfully created backup file: {}'.format(backup_file))
+            logging.info('Successfully created backup file: {}'.format(backup_file))
             res['backup_file'] = backup_file
-        print(json.dumps(res))
+        logging.info(json.dumps(res))
         result.write(json.dumps(res))
 
 
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_billing.py b/infrastructure-provisioning/src/ssn/scripts/configure_billing.py
index f3357a5..7ef7380 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_billing.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_billing.py
@@ -24,6 +24,7 @@
 import argparse
 import sys
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--cloud_provider', type=str,
@@ -141,7 +142,7 @@ def yml_billing(path):
         f.write(config_orig)
         f.close()
     except:
-        print("Could not write the target file {}".format(path))
+        logging.error("Could not write the target file {}".format(path))
         sys.exit(1)
 
 def yml_billing_app(path):
@@ -162,7 +163,7 @@ def yml_billing_app(path):
         f.write(config_orig)
         f.close()
     except:
-        print("Could not write the target file {}".format(path))
+        logging.error("Could not write the target file {}".format(path))
         sys.exit(1)
 
 
@@ -177,7 +178,7 @@ def yml_self_service(path):
         f.write(config_orig)
         f.close()
     except:
-        print("Could not write the target file {}".format(path))
+        logging.error("Could not write the target file {}".format(path))
         sys.exit(1)
 
 
@@ -185,7 +186,7 @@ def yml_self_service(path):
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure billing")
+    logging.info("Configure billing")
     # Check cloud provider
     # Access to the bucket without credentials?
     try:
@@ -194,7 +195,7 @@ if __name__ == "__main__":
             yml_billing_app(args.datalab_dir + 'conf/billing_app.yml')
         yml_self_service(args.datalab_dir + 'conf/self-service.yml')
     except:
-        print('Error configure billing')
+        logging.error('Error configure billing')
         sys.exit(1)
 
     sys.exit(0)
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_conf_file.py b/infrastructure-provisioning/src/ssn/scripts/configure_conf_file.py
index dbcd025..bb7d42d 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_conf_file.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_conf_file.py
@@ -26,6 +26,7 @@ import argparse
 import json
 import sys
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--datalab_dir', type=str, default='')
@@ -55,18 +56,18 @@ def modify_conf_file():
             options = config.options(section)
             for option in options:
                 try:
-                    print('Trying to put variable {}_{} to conf file'.format(section, option))
+                    logging.info('Trying to put variable {}_{} to conf file'.format(section, option))
                     config.set(section, option, variables_list['{}_{}'.format(section, option)])
                 except:
-                    print('Such variable doesn`t exist!')
+                    logging.error('Such variable doesn`t exist!')
                     config.remove_option(section, option)
 
         with open('{}sources/infrastructure-provisioning/src/general/conf/overwrite.ini'.format(args.datalab_dir),
                   'w') as conf_file_final:
             config.write(conf_file_final)
     except Exception as error:
-        print('Error with modifying conf files:')
-        print(str(error))
+        logging.error('Error with modifying conf files:')
+        logging.error(str(error))
         sys.exit(1)
 
 
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
index 1e6e31d..bc64c58 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
@@ -27,6 +27,7 @@ import os
 import sys
 import time
 from datalab.ssn_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -69,7 +70,7 @@ def download_toree():
         conn.run('mv ./toree-0.5.0-incubating/lib/toree-assembly-0.5.0-incubating.jar {}toree-assembly-0.5.0.jar'.format(toree_path))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to download toree: ', str(err))
+        logging.error('Failed to download toree: ' + str(err))
         sys.exit(1)
 
 
@@ -86,7 +87,7 @@ def login_in_gcr(os_user, gcr_creds, odahu_image, datalab_path, cloud_provider):
                     conn.sudo('apt-get -y install google-cloud-sdk')
                 except Exception as err:
                     traceback.print_exc()
-                    print('Failed to install gcloud: ', str(err))
+                    logging.error('Failed to install gcloud: ' + str(err))
                     sys.exit(1)
             try:
                 host_string = '{}@{}'.format(args.os_user, args.hostname)
@@ -99,11 +100,11 @@ def login_in_gcr(os_user, gcr_creds, odahu_image, datalab_path, cloud_provider):
                      .format(odahu_image, datalab_path, cloud_provider))
             except Exception as err:
                 traceback.print_exc()
-                print('Failed to prepare odahu image: ', str(err))
+                logging.error('Failed to prepare odahu image: ' + str(err))
                 sys.exit(1)
         except Exception as err:
             traceback.print_exc()
-            print('Failed to prepare odahu image: ', str(err))
+            logging.error('Failed to prepare odahu image: ' + str(err))
             sys.exit(1)
 
 def build_docker_images(image_list):
@@ -182,7 +183,7 @@ def configure_guacamole():
         return True
     except Exception as err:
         traceback.print_exc()
-        print('Failed to configure guacamole: ', str(err))
+        logging.error('Failed to configure guacamole: ' + str(err))
         return False
 
 def status_container_removal_cron():
@@ -190,14 +191,14 @@ def status_container_removal_cron():
         conn.sudo('bash -c \'echo "*/15 * * * * datalab-user docker container prune -f --filter until=50m --filter label=edge_status" >> /etc/crontab\'')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to create admin status container removal cron: ', str(err))
+        logging.error('Failed to create admin status container removal cron: ' + str(err))
         sys.exit(1)
 
 ##############
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
@@ -205,34 +206,34 @@ if __name__ == "__main__":
     except:
         sys.exit(2)
 
-    print('Modifying configuration files')
+    logging.info('Modifying configuration files')
     try:
         modify_conf_file(args)
     except Exception as err:
-        print('Error:', str(err))
+        logging.error('Error: ' + str(err))
         sys.exit(1)
 
-    print("Downloading Apache Toree")
+    logging.info("Downloading Apache Toree")
     download_toree()
 
-    print("Installing docker daemon")
+    logging.info("Installing docker daemon")
     if not ensure_docker_daemon(args.datalab_path, args.os_user, args.region):
         sys.exit(1)
 
-    print("Login in Google Container Registry")
+    logging.info("Login in Google Container Registry")
     login_in_gcr(args.os_user, args.gcr_creds, args.odahu_image, args.datalab_path, args.cloud_provider)
 
-    print("Building Datalab images")
+    logging.info("Building Datalab images")
     count = 0
     while not build_docker_images(deeper_config) and count < 5:
         count += 1
         time.sleep(5)
 
-    print("Configuring guacamole")
+    logging.info("Configuring guacamole")
     if not configure_guacamole():
         sys.exit(1)
 
-    print("Adding cron to remove edge status containers")
+    logging.info("Adding cron to remove edge status containers")
     status_container_removal_cron()
 
     conn.close()
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
index 59e70e1..b248932 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
@@ -28,6 +28,7 @@ import sys
 import subprocess
 from fabric import *
 from datalab.fab import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--keyfile', type=str, default='')
@@ -60,7 +61,7 @@ def create_user(os_user):
         conn.sudo('chmod 600 /home/{0}/.ssh/authorized_keys'.format(os_user))
         conn.sudo('touch /home/{}/.ssh_user_ensured'.format(initial_user))
     except Exception as err:
-        print('Failed to install gitlab.{}'.format(str(err)))
+        logging.error('Failed to install gitlab.{}'.format(str(err)))
         sys.exit(1)
     conn.close()
 
@@ -97,15 +98,15 @@ def prepare_config():
             subprocess.run("sed -i 's/LDAP_ATTR_EMAIL/{}/g' gitlab.rb".format(os.environ['ldap_attr_email']), shell=True, check=True)
 
             subprocess.run("sed -i 's/GITLAB_ROOT_PASSWORD/{}/g' gitlab.rb".format(os.environ['gitlab_root_password']), shell=True, check=True)
-        print('Initial config is ready.')
+        logging.info('Initial config is ready.')
     except Exception as err:
-        print('Failed to install gitlab.{}'.format(str(err)))
+        logging.error('Failed to install gitlab.{}'.format(str(err)))
         sys.exit(1)
 
 
 def install_gitlab():
     try:
-        print('Installing gitlab...')
+        logging.info('Installing gitlab...')
         if os.environ['conf_os_family'] == 'debian':
             conn.sudo('curl -sS https://packages.gitlab.com/install/repositories/gitlab/gitlab-ce/script.deb.sh | sudo bash')
             conn.sudo('apt install gitlab-ce -y')
@@ -113,7 +114,7 @@ def install_gitlab():
             conn.sudo('curl -sS https://packages.gitlab.com/install/repositories/gitlab/gitlab-ce/script.rpm.sh | sudo bash')
             conn.sudo('yum install gitlab-ce -y')
         else:
-            print('Failed to install gitlab.')
+            logging.error('Failed to install gitlab.')
             raise Exception
 
         with lcd('{}tmp/gitlab'.format(os.environ['conf_datalab_path'])):
@@ -134,7 +135,7 @@ def install_gitlab():
 
         conn.sudo('gitlab-ctl reconfigure')
     except Exception as err:
-        print('Failed to install gitlab.{}'.format(str(err)))
+        logging.error('Failed to install gitlab.{}'.format(str(err)))
         sys.exit(1)
 
 
@@ -151,15 +152,15 @@ def configure_gitlab():
                     .format(proto, os.environ['gitlab_root_password'])).stdout.replace('\n','')
             data = json.loads(raw)
             if not json.loads(os.environ['gitlab_signup_enabled']):
-                print('Disabling signup...')
+                logging.info('Disabling signup...')
                 conn.run('curl -k --request PUT "{0}://localhost/api/v4/application/settings?private_token={1}&sudo=root&signup_enabled=false"'
                     .format(proto, data['private_token']))
             if not json.loads(os.environ['gitlab_public_repos']):
-                print('Disabling public repos...')
+                logging.info('Disabling public repos...')
                 conn.run('curl -k --request PUT "{0}://localhost/api/v4/application/settings?private_token={1}&sudo=root&restricted_visibility_levels=public"'
                     .format(proto, data['private_token']))
     except Exception as err:
-        print("Failed to connect to GitLab via API..{}".format(str(err)))
+        logging.error("Failed to connect to GitLab via API..{}".format(str(err)))
         sys.exit(1)
 
 
@@ -175,9 +176,9 @@ def summary():
     data['os_family'] = os.environ['conf_os_family']
     data['os_user'] = os.environ['conf_os_user']
     data['key_name'] = os.environ['conf_key_name']
-    print('[SUMMARY]')
+    logging.info('[SUMMARY]')
     for key in data:
-        print('{0}: {1}'.format(key, data[key]))
+        logging.info('{0}: {1}'.format(key, data[key]))
 
     with open('{}tmp/result/gitlab.json'.format(os.environ['conf_datalab_path']), 'w') as result:
         result.write(json.dumps(data))
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_mongo.py b/infrastructure-provisioning/src/ssn/scripts/configure_mongo.py
index c0d7f2f..acb9364 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_mongo.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_mongo.py
@@ -28,6 +28,7 @@ import time
 import yaml
 #from datalab.fab import *
 from pymongo import MongoClient
+from datalab.logger import logging
 
 path = "/etc/mongod.conf"
 outfile = "/etc/mongo_params.yml"
@@ -55,7 +56,7 @@ def add_2_yml_config(path, section, param, value):
             yaml.dump(config_orig, outfile_yml_w, default_flow_style=False)
         return True
     except:
-        print("Could not write the target file")
+        logging.error("Could not write the target file")
         return False
 
 
@@ -66,7 +67,7 @@ def read_yml_conf(path, section, param):
         result = config[section][param]
         return result
     except:
-        print("File does not exist")
+        logging.error("File does not exist")
         return ''
 
 
@@ -94,7 +95,7 @@ if __name__ == "__main__":
             command = ['service', 'mongod', 'restart']
             subprocess.call(command, shell=False)
     except:
-        print("Looks like MongoDB have already been secured")
+        logging.warning("Looks like MongoDB have already been secured")
         pass_upd = False
 
     # Generating output config
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py b/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
index 03ef2f7..a4d1ef3 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
@@ -29,6 +29,7 @@ import traceback
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.ssn_lib import *
+from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -47,7 +48,7 @@ def set_hostname(subdomain, hosted_zone_name):
         conn.sudo('hostnamectl set-hostname {0}.{1}'.format(subdomain, hosted_zone_name))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to set hostname: ', str(err))
+        logging.error('Failed to set hostname: {}'.format(str(err)))
         sys.exit(1)
 
 def set_resolve():
@@ -55,7 +56,7 @@ def set_resolve():
         conn.sudo('ln -sf /run/systemd/resolve/resolv.conf /etc/resolv.conf')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to set resolve: ', str(err))
+        logging.error('Failed to set resolve: {}'.format(str(err)))
         sys.exit(1)
 
 def cp_key(keyfile, host_string, os_user):
@@ -67,7 +68,7 @@ def cp_key(keyfile, host_string, os_user):
         conn.sudo('chmod 600 /home/' + os_user + '/keys/*.pem')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to copy key: ', str(err))
+        logging.error('Failed to copy key: {}'.format(str(err)))
         sys.exit(1)
 
 
@@ -78,7 +79,7 @@ def cp_backup_scripts(datalab_path):
         conn.run('chmod +x {0}tmp/backup.py {0}tmp/restore.py'.format(datalab_path))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to copy backup scripts: ', str(err))
+        logging.error('Failed to copy backup scripts: {}'.format(str(err)))
         sys.exit(1)
 
 
@@ -98,7 +99,7 @@ def cp_gitlab_scripts(datalab_path):
         conn.run('cd {}tmp/gitlab && sed -i "s/SERVICE_BASE_NAME/{}/g" gitlab.ini'.format(datalab_path, os.environ['conf_service_base_name']))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to copy gitlab scripts: ', str(err))
+        logging.error('Failed to copy gitlab scripts: {}'.format(str(err)))
         sys.exit(1)
 
 
@@ -123,7 +124,7 @@ def creating_service_directories(datalab_path, os_user):
             conn.sudo('chown -R ' + os_user + ':' + os_user + ' ' + datalab_path)
     except Exception as err:
         traceback.print_exc()
-        print('Failed to create service directories: ', str(err))
+        logging.error('Failed to create service directories: {}'.format(str(err)))
         sys.exit(1)
 
 
@@ -191,7 +192,7 @@ def configure_ssl_certs(hostname, custom_ssl_cert):
         conn.sudo('openssl dhparam -out /etc/ssl/certs/dhparam.pem 2048')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to configure SSL certificates: ', str(err))
+        logging.error('Failed to configure SSL certificates: {}'.format(str(err)))
         sys.exit(1)
 
 def docker_build_script():
@@ -201,7 +202,7 @@ def docker_build_script():
         conn.sudo('mv docker_build /usr/bin/docker-build')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to configure docker_build script: ', str(err))
+        logging.error('Failed to configure docker_build script: {}'.format(str(err)))
         sys.exit(1)
 
 ##############
@@ -210,7 +211,7 @@ def docker_build_script():
 
 
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
@@ -228,31 +229,31 @@ if __name__ == "__main__":
     else:
         custom_ssl_cert = False
 
-    print('Setting resolve DNS configuration')
+    logging.info('Setting resolve DNS configuration')
     set_resolve()
 
-    print("Creating service directories.")
+    logging.info("Creating service directories.")
     creating_service_directories(args.datalab_path, args.os_user)
 
     if domain_created:
-        print("Setting hostname")
+        logging.info("Setting hostname")
         set_hostname(os.environ['ssn_subdomain'], os.environ['ssn_hosted_zone_name'])
         args.hostname = "{0}.{1}".format(os.environ['ssn_subdomain'], os.environ['ssn_hosted_zone_name'])
 
-    print("Installing nginx as frontend.")
+    logging.info("Installing nginx as frontend.")
     ensure_nginx(args.datalab_path)
 
-    print("Installing Java")
+    logging.info("Installing Java")
     ensure_java(args.os_user)
 
-    print("Configuring ssl key and cert for nginx.")
+    logging.info("Configuring ssl key and cert for nginx.")
     configure_ssl_certs(args.hostname, custom_ssl_cert)
 
-    print("Configuring nginx.")
+    logging.info("Configuring nginx.")
     configure_nginx(deeper_config, args.datalab_path, args.hostname)
 
     if os.environ['conf_letsencrypt_enabled'] == 'true':
-        print("Configuring letsencrypt certificates.")
+        logging.info("Configuring letsencrypt certificates.")
         install_certbot(args.os_user)
         if 'conf_letsencrypt_email' in os.environ:
             run_certbot(os.environ['conf_letsencrypt_domain_name'], 'ssn', os.environ['conf_letsencrypt_email'])
@@ -260,25 +261,25 @@ if __name__ == "__main__":
             run_certbot(os.environ['conf_letsencrypt_domain_name'], 'ssn')
         configure_nginx_LE(os.environ['conf_letsencrypt_domain_name'], 'ssn')
 
-    # print("Installing jenkins.")
+    # logging.info("Installing jenkins.")
     # ensure_jenkins(args.datalab_path)
 
-    # print("Configuring jenkins.")
+    # logging.info("Configuring jenkins.")
     #configure_jenkins(args.datalab_path, args.os_user, deeper_config, args.tag_resource_id)
 
-    print("Copying key")
+    logging.info("Copying key")
     cp_key(args.keyfile, host_string, args.os_user)
 
-    print("Copying backup scripts")
+    logging.info("Copying backup scripts")
     cp_backup_scripts(args.datalab_path)
 
-    print("Copying gitlab scripts & files")
+    logging.info("Copying gitlab scripts & files")
     cp_gitlab_scripts(args.datalab_path)
 
-    print("Ensuring safest ssh ciphers")
+    logging.info("Ensuring safest ssh ciphers")
     ensure_ciphers()
 
-    print("Configuring docker_build script")
+    logging.info("Configuring docker_build script")
     docker_build_script()
 
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
index d9327aa..db05276 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
@@ -23,7 +23,7 @@
 
 import argparse
 import json
-import logging
+from datalab.logger import logging
 import os
 import sys
 import traceback
@@ -103,7 +103,7 @@ def copy_ssn_libraries():
             conn.sudo('ln -fs /usr/lib/python3.8/datalab /usr/lib64/python3.8/datalab')
     except Exception as err:
         traceback.print_exc()
-        print('Failed to copy ssn libraries: ', str(err))
+        logging.error('Failed to copy ssn libraries: {}'.format(str(err)))
         sys.exit(1)
 
 
@@ -142,7 +142,7 @@ def configure_mongo(mongo_passwd, default_endpoint_name):
             args.datalab_path))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to configure MongoDB: ', str(err))
+        logging.error('Failed to configure MongoDB: {}'.format(str(err)))
         sys.exit(1)
 
 
@@ -179,7 +179,7 @@ def build_ui():
             except:
                 conn.run('if ! grep -w -E "(ERROR)" /tmp/maven.log > /tmp/maven_error.log; then echo "no_error" > /tmp/maven_error.log;fi')
                 conn.run('cat /tmp/maven_error.log')
-                print('Failed to build Back-end: ', str(err))
+                logging.error('Failed to build Back-end: {}'.format(str(err)))
                 sys.exit(1)
         conn.sudo('mkdir -p {}webapp/'.format(args.datalab_path))
         for service in ['self-service', 'provisioning-service', 'billing']:
@@ -213,7 +213,7 @@ def build_ui():
                     args.datalab_path))
     except Exception as err:
         traceback.print_exc()
-        print('Failed to build UI: ', str(err))
+        logging.error('Failed to build UI: {}'.format(str(err)))
         sys.exit(1)
 
 
@@ -221,7 +221,7 @@ def build_ui():
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     try:
         global conn
         conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
@@ -230,28 +230,28 @@ if __name__ == "__main__":
     except:
         sys.exit(2)
 
-    print("Copying DataLab libraries to SSN")
+    logging.info("Copying DataLab libraries to SSN")
     copy_ssn_libraries()
 
-    print("Installing Supervisor")
+    logging.info("Installing Supervisor")
     ensure_supervisor()
 
-    print("Installing MongoDB")
+    logging.info("Installing MongoDB")
     ensure_mongo()
 
-    print("Configuring MongoDB")
+    logging.info("Configuring MongoDB")
     configure_mongo(mongo_passwd, args.default_endpoint_name)
 
     conn.sudo('bash -c "echo DATALAB_CONF_DIR={} >> /etc/profile"'.format(datalab_conf_dir))
     conn.sudo('bash -c "echo export DATALAB_CONF_DIR >> /etc/profile"')
 
-    print("Installing build dependencies for UI")
+    logging.info("Installing build dependencies for UI")
     install_build_dep()
 
-    print("Building UI")
+    logging.info("Building UI")
     build_ui()
 
-    print("Starting Self-Service(UI)")
+    logging.info("Starting Self-Service(UI)")
     start_ss(args.keyfile, host_string, datalab_conf_dir, web_path,
              args.os_user, mongo_passwd, keystore_passwd, args.cloud_provider,
              args.service_base_name, args.tag_resource_id, args.billing_tag, args.account_id,
diff --git a/infrastructure-provisioning/src/ssn/scripts/docker_build.py b/infrastructure-provisioning/src/ssn/scripts/docker_build.py
index f0ddafe..37c6526 100644
--- a/infrastructure-provisioning/src/ssn/scripts/docker_build.py
+++ b/infrastructure-provisioning/src/ssn/scripts/docker_build.py
@@ -28,6 +28,7 @@ import subprocess
 from fabric import *
 from os.path import exists
 from os import path
+from datalab.logger import logging
 
 src_path = '/opt/datalab/sources/infrastructure-provisioning/src/'
 if sys.argv[1] == 'all':
@@ -71,7 +72,7 @@ def image_build(src_path, node):
                 subprocess.run('cd {3}; docker build --build-arg OS={0} --file general/files/{1}/{2}_Dockerfile -t docker.datalab-{2} .'.format(
                             os_family, cloud_provider, node[i], src_path), shell=True, check=True)
         except Exception as err:
-            print("Failed to build {} image".format(node[i]), str(err))
+            logging.error("Failed to build {} image: {}".format(node[i], str(err)))
             raise Exception
     except Exception as err:
         traceback.print_exc()
diff --git a/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py b/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py
index 813ea47..8bcdd7d 100644
--- a/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py
+++ b/infrastructure-provisioning/src/ssn/scripts/gitlab_deploy.py
@@ -23,9 +23,10 @@
 
 from ConfigParser import ConfigParser
 from fabric import *
+from datalab.logger import logging
 import argparse
 import boto3
-from botocore.client import Config as botoConfig`
+from botocore.client import Config as botoConfig
 import sys
 import os
 
@@ -49,7 +50,7 @@ def read_ini():
                         if var not in os.environ:
                             os.environ[var] = config.get(section, option)
     except Exception as err:
-        print('Failed to read conf file.{}'.format(str(err)))
+        logging.error('Failed to read conf file.{}'.format(str(err)))
         sys.exit(1)
 
 
@@ -70,14 +71,14 @@ def create_instance():
                                          InstanceType=os.environ['aws_instance_type'],
                                          SubnetId=os.environ['aws_subnet_id'])
         for instance in instances:
-            print('Waiting for instance {} become running.'.format(instance.id))
+            logging.info('Waiting for instance {} become running.'.format(instance.id))
             instance.wait_until_running()
             node_name = '{0}-{1}'.format(os.environ['conf_service_base_name'], os.environ['conf_node_name'])
             instance.create_tags(Tags=[{'Key': 'Name', 'Value': node_name}])
             return instance.id
         return ''
     except Exception as err:
-        print("Failed to create instance.{}".format(str(err)))
+        logging.error("Failed to create instance.{}".format(str(err)))
         sys.exit(1)
 
 
@@ -114,7 +115,7 @@ def get_ami_id(ami_name):
             raise Exception("Unable to find image id with name: " + ami_name)
         return image_id
     except Exception as err:
-        print("Failed to get AMI ID.{}".format(str(err)))
+        logging.error("Failed to get AMI ID.{}".format(str(err)))
 
 
 def create_elastic_ip(instance_id):
@@ -123,9 +124,9 @@ def create_elastic_ip(instance_id):
         response = client.allocate_address(Domain='vpc')
         allocation_id = response.get('AllocationId')
         response = client.associate_address(InstanceId=instance_id, AllocationId=allocation_id)
-        print('Association ID: {}'.format(response.get('AssociationId')))
+        logging.info('Association ID: {}'.format(response.get('AssociationId')))
     except Exception as err:
-        print('Failed to allocate elastic IP.{}'.format(str(err)))
+        logging.error('Failed to allocate elastic IP.{}'.format(str(err)))
         sys.exit(1)
 
 
@@ -137,7 +138,7 @@ def get_ec2_ip(instance_id):
         for instance in instances:
             return getattr(instance, 'public_dns_name')
     except Exception as e:
-        print('Failed to get instance IP.{}'.format(str(e)))
+        logging.error('Failed to get instance IP.{}'.format(str(e)))
         sys.exit(1)
 
 
@@ -147,7 +148,7 @@ def put_to_bucket(bucket_name, local_file, destination_file):
         with open(local_file, 'rb') as data:
             s3.upload_fileobj(data, bucket_name, destination_file, ExtraArgs={'ServerSideEncryption': 'AES256'})
     except Exception as err:
-        print('Unable to upload files to S3 bucket.{}'.format(str(err)))
+        logging.error('Unable to upload files to S3 bucket.{}'.format(str(err)))
         sys.exit(1)
 
 
@@ -156,7 +157,7 @@ def terminate_gitlab():
         ec2 = boto3.resource('ec2')
         client = boto3.client('ec2')
         node_name = '{0}-{1}'.format(os.environ['conf_service_base_name'], os.environ['conf_node_name'])
-        print('Terminating "{}" instance...'.format(node_name))
+        logging.info('Terminating "{}" instance...'.format(node_name))
         inst = ec2.instances.filter(
             Filters=[{'Name': 'instance-state-name', 'Values': ['running', 'stopped', 'pending', 'stopping']},
                      {'Name': 'tag:Name', 'Values': ['{}'.format(node_name)]}])
@@ -175,19 +176,19 @@ def terminate_gitlab():
                                     association_id = el_ip.get('AssociationId')
                                     client.disassociate_address(AssociationId=association_id)
                                     client.release_address(AllocationId=allocation_id)
-                                    print('Releasing Elastic IP: {}'.format(elastic_ip))
+                                    logging.info('Releasing Elastic IP: {}'.format(elastic_ip))
                             except:
-                                print('There is no such Elastic IP: {}'.format(elastic_ip))
+                                logging.error('There is no such Elastic IP: {}'.format(elastic_ip))
                 except Exception as err:
-                    print('There is no Elastic IP to disassociate from instance: {}'.format(instance.id), str(err))
+                    logging.error('There is no Elastic IP to disassociate from instance: {}. {}'.format(instance.id, str(err)))
                 client.terminate_instances(InstanceIds=[instance.id])
                 waiter = client.get_waiter('instance_terminated')
                 waiter.wait(InstanceIds=[instance.id])
-                print('The instance {} has been terminated successfully'.format(instance.id))
+                logging.info('The instance {} has been terminated successfully'.format(instance.id))
         else:
-            print('There are no instances with "{}" tag to terminate'.format(node_name))
+            logging.info('There are no instances with "{}" tag to terminate'.format(node_name))
     except Exception as err:
-        print('Failed to terminate gitlab instance. {}'.format(str(err)))
+        logging.error('Failed to terminate gitlab instance. {}'.format(str(err)))
 
 
 if __name__ == "__main__":
@@ -196,11 +197,11 @@ if __name__ == "__main__":
 
     if args.action == 'create':
         instance_id = create_instance()
-        print('Instance {} created.'.format(instance_id))
+        logging.info('Instance {} created.'.format(instance_id))
         create_elastic_ip(instance_id)
         os.environ['instance_id'] = instance_id
         os.environ['instance_hostname'] = get_ec2_ip(instance_id)
-        print('Instance hostname: {}'.format(os.environ['instance_hostname']))
+        logging.info('Instance hostname: {}'.format(os.environ['instance_hostname']))
 
         keyfile = '{}'.format('{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name']))
         params = '--keyfile {0} --instance_ip {1}'.format(keyfile, os.environ['instance_hostname'])
@@ -210,7 +211,7 @@ if __name__ == "__main__":
         try:
             subprocess.run('{0}/{1}.py {2}'.format(head, 'configure_gitlab', params), shell=True, check=True)
         except Exception as err:
-            print('Failed to configure gitlab. {}'.format(str(err)))
+            logging.error('Failed to configure gitlab. {}'.format(str(err)))
             terminate_gitlab()
             sys.exit(1)
 
@@ -224,4 +225,4 @@ if __name__ == "__main__":
         terminate_gitlab()
 
     else:
-        print('Unknown action. Try again.')
+        logging.error('Unknown action. Try again.')
diff --git a/infrastructure-provisioning/src/ssn/scripts/resource_status.py b/infrastructure-provisioning/src/ssn/scripts/resource_status.py
index 7146076..2ff4a0d 100644
--- a/infrastructure-provisioning/src/ssn/scripts/resource_status.py
+++ b/infrastructure-provisioning/src/ssn/scripts/resource_status.py
@@ -24,6 +24,7 @@ import argparse
 import sys
 import yaml
 from pymongo import MongoClient
+from datalab.logger import logging
 
 path = "/etc/mongod.conf"
 outfile = "/etc/mongo_params.yml"
@@ -41,7 +42,7 @@ def read_yml_conf(path, section, param):
         result = config[section][param]
         return result
     except:
-        print("File does not exist")
+        logging.error("File does not exist")
         return ''
 
 
@@ -59,5 +60,5 @@ if __name__ == "__main__":
     try:
         update_resource_status(args.resource, args.status)
     except:
-        print("Unable to update status for the resource {}".format(args.resource))
+        logging.error("Unable to update status for the resource {}".format(args.resource))
         sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/ssn/scripts/restore.py b/infrastructure-provisioning/src/ssn/scripts/restore.py
index 9cb9a98..b38fcc0 100644
--- a/infrastructure-provisioning/src/ssn/scripts/restore.py
+++ b/infrastructure-provisioning/src/ssn/scripts/restore.py
@@ -28,6 +28,7 @@ import sys
 import yaml
 import subprocess
 from fabric import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser(description="Restore script for DataLab configs, keys, certs, jars & database")
 parser.add_argument('--datalab_path', type=str, default='/opt/datalab/', help='Path to DataLab. Default: /opt/datalab/')
@@ -60,7 +61,7 @@ def ask(question):
             else:
                 return False
         except:
-            print("Incorrect answer. Try again...")
+            logging.error("Incorrect answer. Try again...")
             continue
 
 
@@ -70,30 +71,30 @@ def restore_prepare():
             head, tail = os.path.split(args.file)
             temp_folder = "/tmp/{}/".format(tail.split(".")[0])
             if os.path.isdir(temp_folder):
-                print("Temporary folder with this backup already exist.")
-                print("Use folder path '{}' in --file key".format(temp_folder))
+                logging.info("Temporary folder with this backup already exist.")
+                logging.info("Use folder path '{}' in --file key".format(temp_folder))
                 raise Exception
-            print("Backup acrhive will be unpacked to: {}".format(temp_folder))
+            logging.info("Backup archive will be unpacked to: {}".format(temp_folder))
             subprocess.run("mkdir {}".format(temp_folder), shell=True, check=True)
             subprocess.run("tar -xf {0} -C {1}".format(backup_file, temp_folder), shell=True, check=True)
         elif os.path.isdir(backup_file):
             temp_folder = backup_file
         else:
-            print("Please, specify file or folder. Try --help for more details.")
+            logging.info("Please, specify file or folder. Try --help for more details.")
             raise Exception
-        print("Backup acrhive: {} contains following files (exclude logs):".format(backup_file))
+        logging.info("Backup archive: {} contains following files (exclude logs):".format(backup_file))
         subprocess.run("find {} -not -name '*log'".format(temp_folder), shell=True, check=True)
     except Exception as err:
-        print("Failed to open backup.{}".format(str(err)))
+        logging.error("Failed to open backup.{}".format(str(err)))
         sys.exit(1)
 
     try:
         if ask("Maybe you want to create backup of existing configuration before restoring?"):
             with settings(hide('everything')):
-                print("Creating new backup...")
+                logging.info("Creating new backup...")
                 subprocess.run("python3 backup.py --configs all --keys all --certs all --jar all --db", shell=True, check=True)
     except:
-        print("Failed to create new backup.")
+        logging.error("Failed to create new backup.")
         sys.exit(1)
 
     try:
@@ -102,7 +103,7 @@ def restore_prepare():
         else:
             raise Exception
     except:
-        print("Failed to stop all services. Can not continue.")
+        logging.error("Failed to stop all services. Can not continue.")
         sys.exit(1)
 
     return temp_folder
@@ -111,7 +112,7 @@ def restore_prepare():
 def restore_configs():
     try:
         if not os.path.isdir("{0}{1}".format(temp_folder, conf_folder)):
-            print("Config files are not available in this backup.")
+            logging.info("Config files are not available in this backup.")
             raise Exception
 
         configs = list()
@@ -119,12 +120,12 @@ def restore_configs():
             configs = [files for root, dirs, files in os.walk("{0}{1}".format(temp_folder, conf_folder))][0]
         else:
             configs = args.configs.split(",")
-        print("Restore configs: {}".format(configs))
+        logging.info("Restore configs: {}".format(configs))
 
         if args.configs != "skip":
             for filename in configs:
                 if not os.path.isfile("{0}{1}{2}".format(temp_folder, conf_folder, filename)):
-                    print("Config {} are not available in this backup.".format(filename))
+                    logging.info("Config {} are not available in this backup.".format(filename))
                 else:
                     if os.path.isfile("{0}{1}{2}".format(args.datalab_path, conf_folder, filename)):
                         backupfile = "{0}{1}{2}".format(temp_folder, conf_folder, filename)
@@ -133,20 +134,20 @@ def restore_configs():
                             if ask("Config {} was changed, rewrite it?".format(filename)):
                                 subprocess.run("cp -f {0} {1}".format(backupfile, destfile), shell=True, check=True)
                             else:
-                                print("Config {} was skipped.".format(destfile))
+                                logging.info("Config {} was skipped.".format(destfile))
                         else:
-                            print("Config {} was not changed. Skipped.".format(filename))
+                            logging.info("Config {} was not changed. Skipped.".format(filename))
                     else:
-                        print("Config {} does not exist. Creating.".format(filename))
+                        logging.info("Config {} does not exist. Creating.".format(filename))
                         subprocess.run("cp {0}{1}{2} {3}{1}{2}".format(temp_folder, conf_folder, filename, args.datalab_path), shell=True, check=True)
     except:
-        print("Restore configs failed.")
+        logging.error("Restore configs failed.")
 
 
 def restore_keys():
     try:
         if not os.path.isdir("{}keys".format(temp_folder)):
-            print("Key files are not available in this backup.")
+            logging.info("Key files are not available in this backup.")
             raise Exception
 
         keys = list()
@@ -154,33 +155,33 @@ def restore_keys():
             keys = [files for root, dirs, files in os.walk("{}keys".format(temp_folder))][0]
         else:
             keys = args.keys.split(",")
-        print("Restore keys: {}".format(keys))
+        logging.info("Restore keys: {}".format(keys))
 
         if args.keys != "skip":
             for filename in keys:
                 if not os.path.isfile("{0}keys/{1}".format(temp_folder, filename)):
-                    print("Key {} are not available in this backup.".format(filename))
+                    logging.info("Key {} are not available in this backup.".format(filename))
                 else:
                     if os.path.isfile("{0}{1}".format(keys_folder, filename)):
-                        print("Key {} already exist.".format(filename))
+                        logging.info("Key {} already exist.".format(filename))
                         if not filecmp.cmp("{0}keys/{1}".format(temp_folder, filename), "{0}{1}".format(keys_folder, filename)):
                             if ask("Key {} was changed, rewrite it?".format(filename)):
                                 subprocess.run("cp -f {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename), shell=True, check=True)
                             else:
-                                print("Key {} was skipped.".format(filename))
+                                logging.info("Key {} was skipped.".format(filename))
                         else:
-                            print("Key {} was not changed. Skipped.".format(filename))
+                            logging.info("Key {} was not changed. Skipped.".format(filename))
                     else:
-                        print("Key {} does not exist. Creating.".format(filename))
+                        logging.info("Key {} does not exist. Creating.".format(filename))
                         subprocess.run("cp {0}keys/{2} {1}{2}".format(temp_folder, keys_folder, filename), shell=True, check=True)
     except:
-        print("Restore keys failed.")
+        logging.error("Restore keys failed.")
 
 
 def restore_certs():
     try:
         if not os.path.isdir("{}certs".format(temp_folder)):
-            print("Cert files are not available in this backup.")
+            logging.info("Cert files are not available in this backup.")
             raise Exception
 
         certs = list()
@@ -188,35 +189,35 @@ def restore_certs():
             certs = [files for root, dirs, files in os.walk("{}certs".format(temp_folder))][0]
         else:
             certs = args.certs.split(",")
-        print("Restore certs: {}".format(certs))
+        logging.info("Restore certs: {}".format(certs))
 
         if args.certs != "skip":
             for filename in certs:
                 if not os.path.isfile("{0}certs/{1}".format(temp_folder, filename)):
-                    print("Cert {} are not available in this backup.".format(filename))
+                    logging.info("Cert {} is not available in this backup.".format(filename))
                 else:
                     if os.path.isfile("{0}{1}".format(certs_folder, filename)):
-                        print("Cert {} already exist.".format(filename))
+                        logging.info("Cert {} already exists.".format(filename))
                         if not filecmp.cmp("{0}certs/{1}".format(temp_folder, filename), "{0}{1}".format(certs_folder, filename)):
                             if ask("Cert {} was changed, rewrite it?".format(filename)):
                                 subprocess.run("sudo cp -f {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename), shell=True, check=True)
                                 subprocess.run("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename), shell=True, check=True)
                             else:
-                                print("Cert {} was skipped.".format(filename))
+                                logging.info("Cert {} was skipped.".format(filename))
                         else:
-                            print("Cert {} was not changed. Skipped.".format(filename))
+                            logging.info("Cert {} was not changed. Skipped.".format(filename))
                     else:
-                        print("Cert {} does not exist. Creating.".format(filename))
+                        logging.info("Cert {} does not exist. Creating.".format(filename))
                         subprocess.run("sudo cp {0}certs/{2} {1}{2}".format(temp_folder, certs_folder, filename), shell=True, check=True)
                         subprocess.run("sudo chown {0}:{0} {1}{2}".format("root", certs_folder, filename), shell=True, check=True)
     except:
-        print("Restore certs failed.")
+        logging.error("Restore certs failed.")
 
 
 def restore_jars():
     try:
         if not os.path.isdir("{0}jars".format(temp_folder)):
-            print("Jar files are not available in this backup.")
+            logging.info("Jar files are not available in this backup.")
             raise Exception
 
         jars = list()
@@ -224,12 +225,12 @@ def restore_jars():
             jars = [dirs for root, dirs, files in os.walk("{}jars".format(temp_folder))][0]
         else:
             jars = args.jars.split(",")
-        print("Restore jars: {}".format(jars))
+        logging.info("Restore jars: {}".format(jars))
 
         if args.jars != "skip":
             for service in jars:
                 if not os.path.isdir("{0}jars/{1}".format(temp_folder, service)):
-                    print("Jar {} are not available in this backup.".format(service))
+                    logging.info("Jar {} is not available in this backup.".format(service))
                 else:
                     for root, dirs, files in os.walk("{0}jars/{1}".format(temp_folder, service)):
                         for filename in files:
@@ -241,36 +242,36 @@ def restore_jars():
                                     if ask("Jar {} was changed, rewrite it?".format(filename)):
                                         subprocess.run("cp -fP {0} {1}".format(backupfile, destfile), shell=True, check=True)
                                     else:
-                                        print("Jar {} was skipped.".format(destfile))
+                                        logging.info("Jar {} was skipped.".format(destfile))
                                 else:
-                                    print("Jar {} was not changed. Skipped.".format(filename))
+                                    logging.info("Jar {} was not changed. Skipped.".format(filename))
                             else:
-                                print("Jar {} does not exist. Creating.".format(filename))
+                                logging.info("Jar {} does not exist. Creating.".format(filename))
                                 subprocess.run("cp -P {0}jars/{1}/{2} {3}{4}{1}".format(temp_folder, service, filename,
                                                                                args.datalab_path, jars_folder), shell=True, check=True)
     except:
-        print("Restore jars failed.")
+        logging.error("Restore jars failed.")
 
 
 def restore_database():
     try:
-        print("Restore database: {}".format(args.db))
+        logging.info("Restore database: {}".format(args.db))
         if args.db:
             if not os.path.isfile("{0}{1}".format(temp_folder, "mongo.db")):
-                print("File {} are not available in this backup.".format("mongo.db"))
+                logging.info("File {} is not available in this backup.".format("mongo.db"))
                 raise Exception
             else:
                 if ask("Do you want to drop existing database and restore another from backup?"):
                     ssn_conf = open(args.datalab_path + conf_folder + 'ssn.yml').read()
                     data = yaml.load("mongo" + ssn_conf.split("mongo")[-1])
-                    print("Restoring database from backup")
+                    logging.info("Restoring database from backup")
                     subprocess.run("mongorestore --drop --host {0} --port {1} --archive={2}/mongo.db --username {3} --password '{4}' --authenticationDatabase={5}" \
                             .format(data['mongo']['host'], data['mongo']['port'], temp_folder,
                                     data['mongo']['username'], data['mongo']['password'], data['mongo']['database']), shell=True, check=True)
         else:
-            print("Restore database was skipped.")
+            logging.info("Restore database was skipped.")
     except:
-        print("Restore database failed.")
+        logging.error("Restore database failed.")
 
 
 def restore_finalize():
@@ -278,13 +279,13 @@ def restore_finalize():
         if ask("Start all services after restoring?"):
             subprocess.run("sudo supervisorctl start all", shell=True, check=True)
     except:
-        print("Failed to start all services.")
+        logging.error("Failed to start all services.")
 
     try:
         if ask("Clean temporary folder {}?".format(temp_folder)) and temp_folder != "/":
             subprocess.run("rm -rf {}".format(temp_folder), shell=True, check=True)
     except Exception as err:
-        print("Clear temp folder failed. {}".format(str(err)))
+        logging.error("Clear temp folder failed. {}".format(str(err)))
 
 
 if __name__ == "__main__":
@@ -308,4 +309,4 @@ if __name__ == "__main__":
     # Starting services & cleaning tmp folder
     restore_finalize()
 
-    print("Restore is finished. Good luck.")
\ No newline at end of file
+    logging.info("Restore is finished. Good luck.")
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py b/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
index 34708c3..b61756f 100644
--- a/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
+++ b/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import argparse
-import logging
+from datalab.logger import logging
 import sys
 from datalab.ssn_lib import *
 from datalab.fab import *
@@ -36,7 +36,7 @@ args = parser.parse_args()
 
 
 def upload_response_file(instance_name, local_log_filepath, os_user):
-    print('Connect to SSN instance with hostname: {0} and name: {1}'.format(args.instance_hostname, instance_name))
+    logging.info('Connect to SSN instance with hostname: {0} and name: {1}'.format(args.instance_hostname, instance_name))
     pkey = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
     global conn
     conn = datalab.fab.init_datalab_connection(args.instance_hostname, os_user, pkey)
@@ -49,12 +49,12 @@ def upload_response_file(instance_name, local_log_filepath, os_user):
         conn.close()
         return True
     except:
-        print('Failed to upload response file')
+        logging.error('Failed to upload response file')
         return False
 
 
 if __name__ == "__main__":
-    print("Uploading response file")
+    logging.info("Uploading response file")
     if not upload_response_file(args.instance_name, args.local_log_filepath, args.os_user):
         logging.error('Failed to upload response file')
         sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py b/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
index 5122ab4..f2a7db2 100644
--- a/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
+++ b/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
@@ -27,6 +27,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -50,35 +51,35 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    #print("Mount additional volume")
+    #logging.info("Mount additional volume")
     #prepare_disk(args.os_user)
 
     # INSTALL DOCKER COMPOSE
-    print("Installing docker compose")
+    logging.info("Installing docker compose")
     if not ensure_docker_compose(args.os_user):
         sys.exit(1)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install ungit")
+    logging.info("Install ungit")
     install_ungit(args.os_user, args.superset_name, args.edge_instance_private_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
         # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     # PREPARE SUPERSET
diff --git a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
index e1b7cf4..4772035 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
@@ -28,6 +28,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -75,68 +76,68 @@ r_libs = ['R6', 'pbdZMQ={}'.format(os.environ['notebook_pbdzmq_version']), 'RCur
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
-    print("Install R")
+    logging.info("Install R")
     ensure_r(args.os_user, r_libs)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
 
     # INSTALL TENSORFLOW AND OTHER DEEP LEARNING LIBRARIES
-    print("Install TensorFlow")
+    logging.info("Install TensorFlow")
     install_tensor(args.os_user, cuda_version, cuda_file_name,
                    cudnn_version, cudnn_file_name, tensorflow_version,
                    templates_dir, nvidia_version)
-    print("Install Theano")
+    logging.info("Install Theano")
     install_theano(args.os_user, theano_version)
-    print("Installing Keras")
+    logging.info("Installing Keras")
     install_keras(args.os_user, keras_version)
 
     # INSTALL RSTUDIO
-    print("Install RStudio")
+    logging.info("Install RStudio")
     install_rstudio(args.os_user, local_spark_path, args.rstudio_pass, args.rstudio_version, python_venv_version)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
+    logging.info("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path )
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install Ungit")
+    logging.info("Install Ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     # POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
 
     conn.close()
diff --git a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
index c9b5e3f..6808936 100644
--- a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
+++ b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
@@ -28,6 +28,7 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -75,78 +76,78 @@ cudnn_file_name = os.environ['notebook_cudnn_file_name']
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
 
     # INSTALL TENSORFLOW AND OTHER DEEP LEARNING LIBRARIES
-    print("Install TensorFlow")
+    logging.info("Install TensorFlow")
     install_tensor(args.os_user, cuda_version, cuda_file_name,
                    cudnn_version, cudnn_file_name, tensorflow_version,
                    templates_dir, nvidia_version)
-    print("Install Theano")
+    logging.info("Install Theano")
     install_theano(args.os_user, theano_version)
-    print("Installing Keras")
+    logging.info("Installing Keras")
     install_keras(args.os_user, keras_version)
 
     # INSTALL JUPYTER NOTEBOOK
-    print("Install Jupyter")
+    logging.info("Install Jupyter")
     configure_jupyter(args.os_user, jupyter_conf_file, templates_dir, jupyter_version, args.exploratory_name)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
+    logging.info("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, spark_version, hadoop_version, local_spark_path )
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir)
 
     # INSTALL JUPYTER KERNELS
-    #print("Install pyspark local kernel for Jupyter")
+    #logging.info("Install pyspark local kernel for Jupyter")
     #ensure_pyspark_local_kernel(args.os_user, pyspark_local_path_dir, templates_dir, spark_version)
-    print("Install py3spark local kernel for Jupyter")
+    logging.info("Install py3spark local kernel for Jupyter")
     ensure_py3spark_local_kernel(args.os_user, py3spark_local_path_dir, templates_dir, spark_version, python_venv_path, python_venv_version)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install Ungit")
+    logging.info("Install Ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     # INSTALL OPTIONAL PACKAGES
-    print("Installing additional Python packages")
+    logging.info("Installing additional Python packages")
     ensure_additional_python_libs(args.os_user)
-    print("Install Matplotlib")
+    logging.info("Install Matplotlib")
     ensure_matplot(args.os_user)
     
     #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
 
     conn.close()
diff --git a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
index 896bc08..5709cf5 100644
--- a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
+++ b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
@@ -28,6 +28,7 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
+from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -105,7 +106,7 @@ def configure_zeppelin(os_user):
             conn.sudo('cp /opt/zeppelin-' + zeppelin_version + '-bin-netinst/interpreter/md/zeppelin-markdown-*.jar /opt/zeppelin/lib/interpreter/') # necessary when executing paragraph launches java process with "-cp :/opt/zeppelin/lib/interpreter/*:"
             conn.sudo('cp /opt/zeppelin-' + zeppelin_version + '-bin-netinst/interpreter/sh/zeppelin-shell-*.jar /opt/zeppelin/lib/interpreter/')
         except Exception as err:
-            print('Error:', str(err))
+            logging.error('Error: %s', str(err))
             sys.exit(1)
         try:
             conn.put(templates_dir + 'zeppelin-notebook.service', '/tmp/zeppelin-notebook.service')
@@ -126,7 +127,7 @@ def configure_zeppelin(os_user):
             conn.sudo('''bash -l -c 'echo \"d /var/run/zeppelin 0755 {}\" > /usr/lib/tmpfiles.d/zeppelin.conf' '''.format(os_user))
             conn.sudo('touch /home/' + os_user + '/.ensure_dir/zeppelin_ensured')
         except Exception as err:
-            print('Error:', str(err))
+            logging.error('Error: %s', str(err))
             sys.exit(1)
 
 
@@ -203,64 +204,64 @@ def install_local_livy(args):
 # Run script #
 ##############
 if __name__ == "__main__":
-    print("Configure connections")
+    logging.info("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
     deeper_config = json.loads(args.additional_config)
 
     # PREPARE DISK
-    print("Prepare .ensure directory")
+    logging.info("Prepare .ensure directory")
     try:
         if not exists(conn,'/home/' + args.os_user + '/.ensure_dir'):
             conn.sudo('mkdir /home/' + args.os_user + '/.ensure_dir')
     except:
         sys.exit(1)
-    print("Mount additional volume")
+    logging.info("Mount additional volume")
     prepare_disk(args.os_user)
 
     # INSTALL LANGUAGES
-    print("Install Java")
+    logging.info("Install Java")
     ensure_jre_jdk(args.os_user)
-    print("Installing Scala")
+    logging.info("Installing Scala")
     ensure_scala(scala_link, args.scala_version, args.os_user)
     if os.environ['notebook_r_enabled'] == 'true':
-        print("Installing R")
+        logging.info("Installing R")
         ensure_r(args.os_user, r_libs)
-    print("Install Python 3 modules")
+    logging.info("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
     # INSTALL PYTHON IN VIRTUALENV
-    print("Configure Python Virtualenv")
+    logging.info("Configure Python Virtualenv")
     ensure_python_venv(python_venv_version)
-    #print("Install Python 3 specific version")
+    #logging.info("Install Python 3 specific version")
     #ensure_python3_specific_version(python3_version, args.os_user)
 
     # INSTALL SPARK AND CLOUD STORAGE JARS FOR SPARK
-    print("Install local Spark")
+    logging.info("Install local Spark")
     ensure_local_spark(args.os_user, spark_link, args.spark_version, args.hadoop_version, local_spark_path)
-    print("Install storage jars")
+    logging.info("Install storage jars")
     ensure_local_jars(args.os_user, jars_dir)
-    print("Configure local Spark")
+    logging.info("Configure local Spark")
     configure_local_spark(jars_dir, templates_dir)
 
     # INSTALL ZEPPELIN
-    print("Install Zeppelin")
+    logging.info("Install Zeppelin")
     configure_zeppelin(args.os_user)
 
     # INSTALL ZEPPELIN KERNELS
     if args.multiple_clusters == 'true':
-        print("Installing Livy for local kernels")
+        logging.info("Installing Livy for local kernels")
         install_local_livy(args)
-        print("Configuring local kernels")
+        logging.info("Configuring local kernels")
         configure_local_livy_kernels(args)
     else:
-        print("Configuring local kernels")
+        logging.info("Configuring local kernels")
         configure_local_spark_kernels(args, python_venv_path)
 
     # INSTALL UNGIT
-    print("Install nodejs")
+    logging.info("Install nodejs")
     install_nodejs(args.os_user)
-    print("Install Ungit")
+    logging.info("Install Ungit")
     install_ungit(args.os_user, args.exploratory_name, args.edge_ip)
     if exists(conn, '/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
@@ -268,20 +269,20 @@ if __name__ == "__main__":
     conn.sudo('cp /home/{}/.git/templates/hooks/pre-commit /opt/zeppelin/notebook/.git/hooks/'.format(args.os_user))
 
     # INSTALL INACTIVITY CHECKER
-    print("Install inactivity checker")
+    logging.info("Install inactivity checker")
     install_inactivity_checker(args.os_user, args.ip_address)
 
     # INSTALL OPTIONAL PACKAGES
     if os.environ['notebook_r_enabled'] == 'true':
-        print("Install additional R packages")
+        logging.info("Install additional R packages")
         install_r_packages(args.os_user)
-    print("Install additional Python packages")
+    logging.info("Install additional Python packages")
     ensure_additional_python_libs(args.os_user)
-    print("Install Matplotlib.")
+    logging.info("Install Matplotlib.")
     ensure_matplot(args.os_user)
     
     #POST INSTALLATION PROCESS
-    print("Updating pyOpenSSL library")
+    logging.info("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
 
     conn.close()

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org