Posted to commits@datalab.apache.org by lf...@apache.org on 2021/10/19 09:13:40 UTC

[incubator-datalab] branch DATALAB-2409 updated: Revert "[DATALAB-2409]: replaced print with logging in all general/scripts/os .py scripts"

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git


The following commit(s) were added to refs/heads/DATALAB-2409 by this push:
     new d5634c0  Revert "[DATALAB-2409]: replaced print with logging in all general/scripts/os .py scripts"
d5634c0 is described below

commit d5634c07c4a2f0179b2bbb78023c3f457afcc733
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Tue Oct 19 12:09:29 2021 +0300

    Revert "[DATALAB-2409]: replaced print with logging in all general/scripts/os .py scripts"
    
    This reverts commit 0c304cc92787ac40eb7f59806848de0a5d6f3cd8.
    
    Reverted commit for the general/scripts/os .py scripts.
---
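
The hunks below all follow the same shape: the shared `datalab.logger` import is dropped,
and the entry-point scripts (install/list libs, reconfigure Spark, git creds, inactivity
check) go back to configuring Python's stock logging module locally while duplicating the
status lines with print(). A minimal sketch of that restored pattern, assembled from the
hunks in this commit (the environment variables, log path and format string appear
verbatim in the diff; the provisioning framework that exports those variables is assumed
to be in place):

    import logging
    import os

    if __name__ == "__main__":
        # Per-script log file, built from values the provisioning framework
        # is expected to export before the script runs (as in the hunks below).
        local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'],
                                                   os.environ['project_name'],
                                                   os.environ['request_id'])
        local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
        logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
                            level=logging.DEBUG,
                            filename=local_log_filepath)

        # Each status message goes to the log file and is duplicated to stdout.
        logging.info('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE]')
        print('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE]')

Helper scripts that run only on the remote hosts keep plain print() for their error
reporting, as the per-file hunks show.
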
 .../src/general/scripts/os/common_clean_instance.py    | 17 ++++++++---------
 .../src/general/scripts/os/common_configure_proxy.py   |  5 ++---
 .../scripts/os/common_configure_reverse_proxy.py       |  7 +++----
 .../src/general/scripts/os/common_configure_spark.py   |  7 +++----
 .../general/scripts/os/configure_proxy_for_docker.py   |  7 +++----
 .../src/general/scripts/os/dataengine_install_libs.py  | 14 +++++++++++---
 .../src/general/scripts/os/dataengine_list_libs.py     | 14 +++++++++++---
 .../general/scripts/os/dataengine_reconfigure_spark.py | 18 +++++++++++++-----
 .../src/general/scripts/os/get_list_available_pkgs.py  |  5 ++---
 .../src/general/scripts/os/install_additional_libs.py  | 17 ++++++++---------
 .../scripts/os/jupyter_install_dataengine_kernels.py   |  3 +--
 .../general/scripts/os/jupyterlab_container_start.py   |  7 +++----
 .../src/general/scripts/os/manage_git_creds.py         |  9 ++++-----
 .../src/general/scripts/os/notebook_git_creds.py       | 12 ++++++++++--
 .../general/scripts/os/notebook_inactivity_check.py    | 13 ++++++++++---
 .../src/general/scripts/os/notebook_install_libs.py    | 14 +++++++++++---
 .../src/general/scripts/os/notebook_list_libs.py       | 14 +++++++++++---
 .../general/scripts/os/notebook_reconfigure_spark.py   | 18 +++++++++++++-----
 .../scripts/os/rstudio_dataengine_create_configs.py    |  5 ++---
 .../src/general/scripts/os/superset_start.py           |  7 +++----
 .../os/tensor-rstudio_dataengine_create_configs.py     |  5 ++---
 .../scripts/os/tensor_install_dataengine_kernels.py    |  3 +--
 .../scripts/os/zeppelin_dataengine_create_configs.py   |  3 +--
 23 files changed, 136 insertions(+), 88 deletions(-)
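
All 23 files above drop the `from datalab.logger import logging` line; that shared logger
module is not part of this diff, so the snippet below is only a hypothetical illustration
of the kind of pre-configured module such an import usually wraps. The handler choice and
module layout are assumptions (the format string simply mirrors the basicConfig calls
restored in this commit), not the project's actual implementation:

    # Hypothetical sketch, NOT taken from the repository: a logger configured once
    # at import time so scripts can write `from datalab.logger import logging`.
    import logging as _logging
    import sys

    logging = _logging.getLogger('datalab')
    logging.setLevel(_logging.DEBUG)

    _handler = _logging.StreamHandler(sys.stdout)
    _handler.setFormatter(_logging.Formatter('%(levelname)-8s [%(asctime)s]  %(message)s'))
    logging.addHandler(_handler)

With a module like this, each script imports one already-configured logger object instead
of calling logging.basicConfig itself, which removes the per-script basicConfig blocks and
the duplicated print() calls that this revert puts back.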

diff --git a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
index 99e7904..a9d370e 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
@@ -25,7 +25,6 @@ import argparse
 import os
 import sys
 from datalab.notebook_lib import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -49,7 +48,7 @@ def general_clean():
         remove_os_pkg(['nodejs', 'npm'])
         conn.sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -65,7 +64,7 @@ def clean_jupyter():
         conn.sudo('rm -f /etc/systemd/system/jupyter-notebook.service')
         conn.sudo('systemctl daemon-reload')
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -80,7 +79,7 @@ def clean_zeppelin():
         conn.sudo('rm -f /etc/systemd/system/zeppelin-notebook.service')
         conn.sudo('systemctl daemon-reload')
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -90,7 +89,7 @@ def clean_rstudio():
         conn.sudo('rm -f /home/{}/.Rprofile'.format(args.os_user))
         conn.sudo('rm -f /home/{}/.Renviron'.format(args.os_user))
     except Exception as err:
-        logging.error('Error:', str(err))
+        print('Error:', str(err))
         sys.exit(1)
 
 
@@ -101,7 +100,7 @@ def clean_tensor():
         conn.sudo('systemctl disable tensorboard')
         conn.sudo('systemctl daemon-reload')
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -112,12 +111,12 @@ def clean_tensor_rstudio():
         conn.sudo('systemctl disable tensorboard')
         conn.sudo('systemctl daemon-reload')
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    logging.info('Configure connections')
+    print('Configure connections')
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
@@ -156,6 +155,6 @@ if __name__ == "__main__":
             elif args.application == ('tensor-rstudio'):
                 clean_tensor_rstudio()
     else:
-        logging.info('Found default ami, do not make clean')
+        print('Found default ami, do not make clean')
     #conn.close()
     sys.exit(0)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
index 19666df..604a23a 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
@@ -25,7 +25,6 @@ import argparse
 import json
 from datalab.notebook_lib import *
 from datalab.fab import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -41,12 +40,12 @@ args = parser.parse_args()
 # Run script #
 ##############
 if __name__ == "__main__":
-    logging.info("Configure connections")
+    print("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
     deeper_config = json.loads(args.additional_config)
 
-    logging.info("Enabling proxy for notebook server for repositories access.")
+    print("Enabling proxy for notebook server for repositories access.")
     datalab.notebook_lib.enable_proxy(deeper_config['proxy_host'], deeper_config['proxy_port'])
 
     conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
index f43a3a9..38ea331 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
@@ -26,7 +26,6 @@ import json
 import sys
 from datalab.fab import *
 from datalab.meta_lib import get_instance_private_ip_address
-from datalab.logger import logging
 from fabric import *
 from jinja2 import Environment, FileSystemLoader
 from datalab.fab import *
@@ -97,15 +96,15 @@ def make_template():
 # Run script #
 ##############
 if __name__ == "__main__":
-    logging.info("Make template")
+    print("Make template")
 
     try:
         conf_file_name = make_template()
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
-    logging.info("Configure connections")
+    print("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.edge_hostname, args.os_user, args.keyfile)
     conn.put('/tmp/{}.conf'.format(conf_file_name), '/tmp/{}.conf'.format(conf_file_name))
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
index 21477c8..47f7b78 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
@@ -28,7 +28,6 @@ import sys
 import time
 from datalab.fab import *
 from datalab.notebook_lib import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -48,7 +47,7 @@ def update_spark_defaults_conf(spark_conf):
             conn.sudo('''sed -i '/^# Updated/d' {0}'''.format(conf))
             conn.sudo('''echo "# Updated by DATALAB at {0} >> {1}'''.format(timestamp, conf))
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -77,12 +76,12 @@ def add_custom_spark_properties(cluster_name):
                 conn.sudo('echo "{0}" >> /opt/{1}/spark/conf/spark-defaults.conf'.format(prop, cluster_name))
             conn.sudo('sed -i "/^\s*$/d" /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
-    logging.info('Configure connections')
+    print('Configure connections')
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py b/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
index 72c1c9c..a233750 100644
--- a/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
+++ b/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
@@ -25,7 +25,6 @@ import argparse
 import sys
 from fabric import *
 from datalab.fab import *
-from datalab.logger import logging
 
 parser = argparse.ArgumentParser()
 parser.add_argument('--hostname', type=str, default='')
@@ -37,10 +36,10 @@ http_file = '/etc/systemd/system/docker.service.d/http-proxy.conf'
 https_file = '/etc/systemd/system/docker.service.d/https-proxy.conf'
 
 if __name__ == "__main__":
-    logging.info("Configure connections")
+    print("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
-    logging.info("Configuring proxy for docker")
+    print("Configuring proxy for docker")
     try:
         conn.sudo('mkdir -p /etc/systemd/system/docker.service.d')
         conn.sudo('touch {}'.format(http_file))
@@ -57,6 +56,6 @@ if __name__ == "__main__":
         conn.sudo('update-rc.d docker enable')
         conn.sudo('systemctl restart docker')
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
     conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
index 6ce8615..05f4885 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-from datalab.logger import logging
+import logging
 import multiprocessing
 import os
 import sys
@@ -44,14 +44,22 @@ def install_libs_on_slaves(slave, data_engine):
         # Run script to install additional libs
         subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True, check=True)
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
     instance_class = 'notebook'
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
     try:
         logging.info('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE]')
+        print('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE]')
         data_engine = dict()
         try:
             data_engine['os_user'] = os.environ['conf_os_user']
@@ -92,6 +100,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to install additional libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
index b52df61..2580279 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-from datalab.logger import logging
+import logging
 import os
 import sys
 import traceback
@@ -33,8 +33,16 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
     try:
         logging.info('[GETTING AVAILABLE PACKAGES]')
+        print('[GETTING AVAILABLE PACKAGES]')
         data_engine = dict()
         try:
             data_engine['os_user'] = os.environ['conf_os_user']
@@ -47,7 +55,7 @@ if __name__ == "__main__":
                 data_engine['tag_name'], data_engine['master_node_name'])
             data_engine['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         except Exception as err:
-            logging.error('Error: {0}'.format(err))
+            print('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         params = "--os_user {} --instance_ip {} --keyfile '{}' --group {}" \
@@ -59,6 +67,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to get available libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
index 08a56a6..41e07b4 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import json
-from datalab.logger import logging
+import logging
 import multiprocessing
 import os
 import sys
@@ -43,14 +43,22 @@ def install_libs_on_slaves(slave, data_engine):
         # Run script to install additional libs
         subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True, check=True)
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
 if __name__ == "__main__":
     instance_class = 'notebook'
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
     try:
         logging.info('[RECONFIGURING SPARK ON DATAENGINE]')
+        print('[RECONFIGURING SPARK ON DATAENGINE]')
         data_engine = dict()
         try:
             data_engine['os_user'] = os.environ['conf_os_user']
@@ -103,7 +111,7 @@ if __name__ == "__main__":
             raise Exception
 
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to reconfigure Spark.", str(err))
         sys.exit(1)
 
@@ -111,8 +119,8 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Reconfigure Spark on Data Engine"}
-            logging.info(json.dumps(res))
+            print(json.dumps(res))
             result.write(json.dumps(res))
     except:
-        logging.error("Failed writing results.")
+        print("Failed writing results.")
         sys.exit(0)
diff --git a/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py b/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
index 9d25eb7..8e33b20 100644
--- a/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
@@ -28,7 +28,6 @@ import time
 import xmlrpc.client
 from datalab.fab import *
 from datalab.notebook_lib import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -54,7 +53,7 @@ def get_available_pip_pkgs(version):
                 time.sleep(5)
                 continue
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
@@ -71,7 +70,7 @@ def get_uncategorised_pip_pkgs(all_pkgs_pip2, all_pkgs_pip3):
             pip_pkgs[pkg] = "N/A"
         return pip_pkgs
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py b/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
index f18f0cb..c3661e4 100644
--- a/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
@@ -27,7 +27,6 @@ import json
 import sys
 from datalab.fab import *
 from datalab.notebook_lib import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -43,7 +42,7 @@ if __name__ == "__main__":
     global conn
     conn = datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
 
-    logging.info('Installing libraries: {}'.format(args.libs))
+    print('Installing libraries: {}'.format(args.libs))
     general_status = list()
     data = ast.literal_eval(args.libs)
     pkgs = {"libraries": {}}
@@ -60,40 +59,40 @@ if __name__ == "__main__":
                 pkgs['libraries'][data[row]['group']].append(
                     [data[row]['name'], data[row]['version']])
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to parse libs list.", str(err))
         sys.exit(1)
 
     try:
-        logging.info('Installing os packages: {}'.format(pkgs['libraries']['os_pkg']))
+        print('Installing os packages: {}'.format(pkgs['libraries']['os_pkg']))
         status = install_os_pkg(pkgs['libraries']['os_pkg'])
         general_status = general_status + status
     except KeyError:
         pass
 
     try:
-        logging.info('Installing java dependencies: {}'.format(pkgs['libraries']['java']))
+        print('Installing java dependencies: {}'.format(pkgs['libraries']['java']))
         status = install_java_pkg(pkgs['libraries']['java'])
         general_status = general_status + status
     except KeyError:
         pass
 
     #try:
-        #logging.info('Installing pip2 packages: {}'.format(pkgs['libraries']['pip2']))
+        #print('Installing pip2 packages: {}'.format(pkgs['libraries']['pip2']))
         #status = install_pip_pkg(pkgs['libraries']['pip2'], 'pip2', 'pip2', args.dataengine_service)
         #general_status = general_status + status
     #except KeyError:
         #pass
 
     try:
-        logging.info('Installing pip3 packages: {}'.format(pkgs['libraries']['pip3']))
+        print('Installing pip3 packages: {}'.format(pkgs['libraries']['pip3']))
         status = install_pip_pkg(pkgs['libraries']['pip3'], 'pip3', 'pip3', args.dataengine_service)
         general_status = general_status + status
     except KeyError:
         pass
 
     try:
-        logging.info('Installing other packages (only tries pip3): {}'.format(pkgs['libraries']['others']))
+        print('Installing other packages (only tries pip3): {}'.format(pkgs['libraries']['others']))
         for pkg in pkgs['libraries']['others']:
             status_pip3 = install_pip_pkg([pkg], 'pip3', 'others', args.dataengine_service)
             general_status = general_status + status_pip3
@@ -104,7 +103,7 @@ if __name__ == "__main__":
         and os.environ['notebook_r_enabled'] == 'true')\
             or os.environ['application'] in ('rstudio', 'tensor-rstudio'):
         try:
-            logging.info('Installing R packages: {}'.format(pkgs['libraries']['r_pkg']))
+            print('Installing R packages: {}'.format(pkgs['libraries']['r_pkg']))
             status = install_r_pkg(pkgs['libraries']['r_pkg'])
             general_status = general_status + status
         except KeyError:
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
index 560ba1d..8fbc014 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
@@ -25,7 +25,6 @@ import argparse
 import os
 from datalab.fab import *
 from datalab.meta_lib import *
-from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -108,7 +107,7 @@ def install_sparkamagic_kernels(args):
                 spark_master_ip, args.os_user))
         datalab.fab.conn.sudo('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user))
     except Exception as err:
-        logging.error(err)
+        print(err)
         sys.exit(1)
 
 def create_inactivity_log(master_ip):
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py b/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
index c619e81..a7e50b1 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
@@ -25,7 +25,6 @@ import sys
 import argparse
 from datalab.fab import *
 from datalab.notebook_lib import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -49,13 +48,13 @@ def start_jupyterlab_container(jupyterlab_dir):
     except: sys.exit(1)
 
 if __name__ == "__main__":
-    logging.info("Configure connections")
+    print("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
-    logging.info("Starting Jupyter container")
+    print("Starting Jupyter container")
     try:
         start_jupyterlab_container(jupyterlab_dir)
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
     conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py b/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
index 6955f07..e8be6fb 100644
--- a/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
+++ b/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
@@ -28,7 +28,6 @@ import sys
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -53,14 +52,14 @@ if __name__ == "__main__":
             conn.run('rm .gitcreds')
         git_creds = os.environ['git_creds']
     except KeyError as err:
-        logging.error('Error: {0}'.format(err))
-        logging.error("Parameter git_creds does not exist. Skipping.")
+        print('Error: {0}'.format(err))
+        print("Parameter git_creds does not exist. Skipping.")
         sys.exit(0)
 
     try:
         data = ast.literal_eval(git_creds)
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to parse git credentials.", str(err))
         sys.exit(1)
 
@@ -84,7 +83,7 @@ if __name__ == "__main__":
         conn.put('new_gitcreds', '/home/{}/.gitcreds'.format(args.os_user))
 
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to add host/login/(password/token) to config.", str(err))
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py b/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
index fdfe87f..bb2b974 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-from datalab.logger import logging
+import logging
 import os
 import sys
 import traceback
@@ -33,8 +33,16 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
     try:
         logging.info('[SETUP USER GIT CREDENTIALS]')
+        print('[SETUP USER GIT CREDENTIALS]')
         notebook_config = dict()
         notebook_config['notebook_name'] = os.environ['notebook_instance_name']
         notebook_config['os_user'] = os.environ['conf_os_user']
@@ -53,7 +61,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to manage git credentials.", str(err))
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py b/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
index c70593b..6784b81 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
@@ -28,11 +28,18 @@ import subprocess
 from datalab.actions_lib import *
 from datalab.fab import *
 from datalab.meta_lib import *
-from datalab.logger import logging
 
 if __name__ == "__main__":
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/project/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
     try:
         logging.info('[ASK INACTIVITY STATUS]')
+        print('[ASK INACTIVITY STATUS]')
         notebook_config = dict()
         try:
             notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -50,7 +57,7 @@ if __name__ == "__main__":
             else:
                 notebook_config['dataengine_ip'] = '0.0.0.0'
         except Exception as err:
-            logging.error('Error: {0}'.format(err))
+            print('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         params = "--os_user {0} --instance_ip {1} --keyfile '{2}' --resource_type {3} --dataengine_ip {4}" \
@@ -62,6 +69,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to ask inactivity status.", str(err))
         sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py b/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
index b894449..50b9609 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-from datalab.logger import logging
+import logging
 import os
 import sys
 import traceback
@@ -33,8 +33,16 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
     try:
         logging.info('[INSTALLING ADDITIONAL LIBRARIES ON NOTEBOOK INSTANCE]')
+        print('[INSTALLING ADDITIONAL LIBRARIES ON NOTEBOOK INSTANCE]')
         notebook_config = dict()
         try:
             notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -46,7 +54,7 @@ if __name__ == "__main__":
             notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
             notebook_config['libs'] = os.environ['libs']
         except Exception as err:
-            logging.error('Error: {0}'.format(err))
+            print('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         params = '--os_user {} --instance_ip {} --keyfile "{}" --libs "{}"' \
@@ -59,6 +67,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to install additional libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py b/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
index 4cf4215..e6e989e 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
@@ -21,7 +21,7 @@
 #
 # ******************************************************************************
 
-from datalab.logger import logging
+import logging
 import os
 import sys
 import traceback
@@ -33,8 +33,16 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
     try:
         logging.info('[GETTING AVAILABLE PACKAGES]')
+        print('[GETTING AVAILABLE PACKAGES]')
         notebook_config = dict()
         try:
             notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -46,7 +54,7 @@ if __name__ == "__main__":
                 notebook_config['tag_name'], notebook_config['notebook_name'])
             notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         except Exception as err:
-            logging.error('Error: {0}'.format(err))
+            print('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         params = "--os_user {} --instance_ip {} --keyfile '{}' --group {}" \
@@ -58,6 +66,6 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to get available libraries.", str(err))
         sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py b/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
index fa98c77..596d4d8 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
@@ -22,7 +22,7 @@
 # ******************************************************************************
 
 import json
-from datalab.logger import logging
+import logging
 import os
 import sys
 import traceback
@@ -34,8 +34,16 @@ from fabric import *
 
 if __name__ == "__main__":
     instance_class = 'notebook'
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
     try:
         logging.info('[RECONFIGURING SPARK]')
+        print('[RECONFIGURING SPARK]')
         notebook_config = dict()
         try:
             notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -46,7 +54,7 @@ if __name__ == "__main__":
                 notebook_config['tag_name'], notebook_config['notebook_name'])
             notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
         except Exception as err:
-            logging.error('Error: {0}'.format(err))
+            print('Error: {0}'.format(err))
             append_result("Failed to get parameter.", str(err))
             sys.exit(1)
         params = "--os_user {} --instance_ip {} --keyfile '{}' --resource_type notebook " \
@@ -58,7 +66,7 @@ if __name__ == "__main__":
             traceback.print_exc()
             raise Exception
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         append_result("Failed to reconfigure Spark.", str(err))
         sys.exit(1)
 
@@ -67,8 +75,8 @@ if __name__ == "__main__":
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": notebook_config['service_base_name'],
                    "Action": "Reconfigure Spark on Notebook"}
-            logging.info(json.dumps(res))
+            print(json.dumps(res))
             result.write(json.dumps(res))
     except:
-        logging.error("Failed writing results.")
+        print("Failed writing results.")
         sys.exit(0)
diff --git a/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
index 58f07af..c7e0017 100644
--- a/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
@@ -29,7 +29,6 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -65,7 +64,7 @@ def configure_rstudio():
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
             subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured', shell=True, check=True)
         except Exception as err:
-            logging.error('Error: {0}'.format(err))
+            print('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
@@ -79,7 +78,7 @@ def configure_rstudio():
                   args.os_user + '/.Rprofile', shell=True, check=True)
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
         except Exception as err:
-            logging.error('Error: {0}'.format(err))
+            print('Error: {0}'.format(err))
             sys.exit(1)
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/superset_start.py b/infrastructure-provisioning/src/general/scripts/os/superset_start.py
index 88fb46f..9f976ae 100644
--- a/infrastructure-provisioning/src/general/scripts/os/superset_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/superset_start.py
@@ -25,7 +25,6 @@ import argparse
 import sys
 from datalab.fab import *
 from datalab.notebook_lib import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -48,14 +47,14 @@ def start_superset(superset_dir):
     except: sys.exit(1)
 
 if __name__ == "__main__":
-    logging.info("Configure connections")
+    print("Configure connections")
     global conn
     conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
-    logging.info("Starting Superset")
+    print("Starting Superset")
     try:
         start_superset(superset_dir)
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
     conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
index 3255bb0..c8965b6 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
@@ -28,7 +28,6 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -64,7 +63,7 @@ def configure_rstudio():
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
             subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured', shell=True, check=True)
         except Exception as err:
-            logging.error('Error: {0}'.format(err))
+            print('Error: {0}'.format(err))
             sys.exit(1)
     else:
         try:
@@ -78,7 +77,7 @@ def configure_rstudio():
                   args.os_user + '/.Rprofile', shell=True, check=True)
             subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
         except Exception as err:
-            logging.error('Error: {0}'.format(err))
+            print('Error: {0}'.format(err))
             sys.exit(1)
 
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
index c155e42..e6d27aa 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
@@ -25,7 +25,6 @@ import argparse
 import os
 from datalab.fab import *
 from datalab.meta_lib import *
-from datalab.logger import logging
 from fabric import *
 from patchwork.files import exists
 from patchwork import files
@@ -101,7 +100,7 @@ def install_sparkamagic_kernels(args):
                 spark_master_ip, args.os_user))
         datalab.fab.conn.sudo('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user))
     except Exception as err:
-        logging.error(err)
+        print(err)
         sys.exit(1)
 
 def create_inactivity_log(master_ip, hoststring):
diff --git a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
index 52cf241..7f424eb 100644
--- a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
@@ -29,7 +29,6 @@ from datalab.actions_lib import *
 from datalab.common_lib import *
 from datalab.fab import *
 from datalab.notebook_lib import *
-from datalab.logger import logging
 from fabric import *
 
 parser = argparse.ArgumentParser()
@@ -147,7 +146,7 @@ def configure_zeppelin_dataengine_interpreter(cluster_name, cluster_dir, os_user
                         subprocess.run('sleep 5', shell=True, check=True)
         subprocess.run('touch /home/' + os_user + '/.ensure_dir/dataengine_' + cluster_name + '_interpreter_ensured', shell=True, check=True)
     except Exception as err:
-        logging.error('Error: {0}'.format(err))
+        print('Error: {0}'.format(err))
         sys.exit(1)
 
 
