You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@datalab.apache.org by lf...@apache.org on 2021/10/12 14:08:38 UTC
[incubator-datalab] 03/08: [DATALAB-2409]: replaced print with
logging in all general/scripts/os .py scripts
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit 0c304cc92787ac40eb7f59806848de0a5d6f3cd8
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Oct 11 18:00:04 2021 +0300
[DATALAB-2409]: replaced print with logging in all general/scripts/os .py scripts
---
.../src/general/scripts/os/common_clean_instance.py | 17 +++++++++--------
.../src/general/scripts/os/common_configure_proxy.py | 5 +++--
.../scripts/os/common_configure_reverse_proxy.py | 7 ++++---
.../src/general/scripts/os/common_configure_spark.py | 7 ++++---
.../general/scripts/os/configure_proxy_for_docker.py | 7 ++++---
.../src/general/scripts/os/dataengine_install_libs.py | 14 +++-----------
.../src/general/scripts/os/dataengine_list_libs.py | 14 +++-----------
.../general/scripts/os/dataengine_reconfigure_spark.py | 18 +++++-------------
.../src/general/scripts/os/get_list_available_pkgs.py | 5 +++--
.../src/general/scripts/os/install_additional_libs.py | 17 +++++++++--------
.../scripts/os/jupyter_install_dataengine_kernels.py | 3 ++-
.../general/scripts/os/jupyterlab_container_start.py | 7 ++++---
.../src/general/scripts/os/manage_git_creds.py | 9 +++++----
.../src/general/scripts/os/notebook_git_creds.py | 12 ++----------
.../general/scripts/os/notebook_inactivity_check.py | 13 +++----------
.../src/general/scripts/os/notebook_install_libs.py | 14 +++-----------
.../src/general/scripts/os/notebook_list_libs.py | 14 +++-----------
.../general/scripts/os/notebook_reconfigure_spark.py | 18 +++++-------------
.../scripts/os/rstudio_dataengine_create_configs.py | 5 +++--
.../src/general/scripts/os/superset_start.py | 7 ++++---
.../os/tensor-rstudio_dataengine_create_configs.py | 5 +++--
.../scripts/os/tensor_install_dataengine_kernels.py | 3 ++-
.../scripts/os/zeppelin_dataengine_create_configs.py | 3 ++-
23 files changed, 88 insertions(+), 136 deletions(-)
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
index a9d370e..99e7904 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
@@ -25,6 +25,7 @@ import argparse
import os
import sys
from datalab.notebook_lib import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -48,7 +49,7 @@ def general_clean():
remove_os_pkg(['nodejs', 'npm'])
conn.sudo('sed -i "/spark.*.memory/d" /opt/spark/conf/spark-defaults.conf')
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
@@ -64,7 +65,7 @@ def clean_jupyter():
conn.sudo('rm -f /etc/systemd/system/jupyter-notebook.service')
conn.sudo('systemctl daemon-reload')
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
@@ -79,7 +80,7 @@ def clean_zeppelin():
conn.sudo('rm -f /etc/systemd/system/zeppelin-notebook.service')
conn.sudo('systemctl daemon-reload')
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
@@ -89,7 +90,7 @@ def clean_rstudio():
conn.sudo('rm -f /home/{}/.Rprofile'.format(args.os_user))
conn.sudo('rm -f /home/{}/.Renviron'.format(args.os_user))
except Exception as err:
- print('Error:', str(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
@@ -100,7 +101,7 @@ def clean_tensor():
conn.sudo('systemctl disable tensorboard')
conn.sudo('systemctl daemon-reload')
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
@@ -111,12 +112,12 @@ def clean_tensor_rstudio():
conn.sudo('systemctl disable tensorboard')
conn.sudo('systemctl daemon-reload')
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
if __name__ == "__main__":
- print('Configure connections')
+ logging.info('Configure connections')
global conn
conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
@@ -155,6 +156,6 @@ if __name__ == "__main__":
elif args.application == ('tensor-rstudio'):
clean_tensor_rstudio()
else:
- print('Found default ami, do not make clean')
+ logging.info('Found default ami, do not make clean')
#conn.close()
sys.exit(0)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
index 604a23a..19666df 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
@@ -25,6 +25,7 @@ import argparse
import json
from datalab.notebook_lib import *
from datalab.fab import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -40,12 +41,12 @@ args = parser.parse_args()
# Run script #
##############
if __name__ == "__main__":
- print("Configure connections")
+ logging.info("Configure connections")
global conn
conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
deeper_config = json.loads(args.additional_config)
- print("Enabling proxy for notebook server for repositories access.")
+ logging.info("Enabling proxy for notebook server for repositories access.")
datalab.notebook_lib.enable_proxy(deeper_config['proxy_host'], deeper_config['proxy_port'])
conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
index 38ea331..f43a3a9 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
@@ -26,6 +26,7 @@ import json
import sys
from datalab.fab import *
from datalab.meta_lib import get_instance_private_ip_address
+from datalab.logger import logging
from fabric import *
from jinja2 import Environment, FileSystemLoader
from datalab.fab import *
@@ -96,15 +97,15 @@ def make_template():
# Run script #
##############
if __name__ == "__main__":
- print("Make template")
+ logging.info("Make template")
try:
conf_file_name = make_template()
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
- print("Configure connections")
+ logging.info("Configure connections")
global conn
conn = datalab.fab.init_datalab_connection(args.edge_hostname, args.os_user, args.keyfile)
conn.put('/tmp/{}.conf'.format(conf_file_name), '/tmp/{}.conf'.format(conf_file_name))
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
index 47f7b78..21477c8 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
@@ -28,6 +28,7 @@ import sys
import time
from datalab.fab import *
from datalab.notebook_lib import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -47,7 +48,7 @@ def update_spark_defaults_conf(spark_conf):
conn.sudo('''sed -i '/^# Updated/d' {0}'''.format(conf))
conn.sudo('''echo "# Updated by DATALAB at {0} >> {1}'''.format(timestamp, conf))
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
@@ -76,12 +77,12 @@ def add_custom_spark_properties(cluster_name):
conn.sudo('echo "{0}" >> /opt/{1}/spark/conf/spark-defaults.conf'.format(prop, cluster_name))
conn.sudo('sed -i "/^\s*$/d" /opt/{0}/spark/conf/spark-defaults.conf'.format(cluster_name))
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
if __name__ == "__main__":
- print('Configure connections')
+ logging.info('Configure connections')
global conn
conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
diff --git a/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py b/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
index a233750..72c1c9c 100644
--- a/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
+++ b/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
@@ -25,6 +25,7 @@ import argparse
import sys
from fabric import *
from datalab.fab import *
+from datalab.logger import logging
parser = argparse.ArgumentParser()
parser.add_argument('--hostname', type=str, default='')
@@ -36,10 +37,10 @@ http_file = '/etc/systemd/system/docker.service.d/http-proxy.conf'
https_file = '/etc/systemd/system/docker.service.d/https-proxy.conf'
if __name__ == "__main__":
- print("Configure connections")
+ logging.info("Configure connections")
global conn
conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
- print("Configuring proxy for docker")
+ logging.info("Configuring proxy for docker")
try:
conn.sudo('mkdir -p /etc/systemd/system/docker.service.d')
conn.sudo('touch {}'.format(http_file))
@@ -56,6 +57,6 @@ if __name__ == "__main__":
conn.sudo('update-rc.d docker enable')
conn.sudo('systemctl restart docker')
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
conn.close()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
index 05f4885..6ce8615 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_install_libs.py
@@ -21,7 +21,7 @@
#
# ******************************************************************************
-import logging
+from datalab.logger import logging
import multiprocessing
import os
import sys
@@ -44,22 +44,14 @@ def install_libs_on_slaves(slave, data_engine):
# Run script to install additional libs
subprocess.run("~/scripts/{}.py {}".format('install_additional_libs', params), shell=True, check=True)
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
if __name__ == "__main__":
instance_class = 'notebook'
- local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
- os.environ['request_id'])
- local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
- logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
- level=logging.DEBUG,
- filename=local_log_filepath)
-
try:
logging.info('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE]')
- print('[INSTALLING ADDITIONAL LIBRARIES ON DATAENGINE]')
data_engine = dict()
try:
data_engine['os_user'] = os.environ['conf_os_user']
@@ -100,6 +92,6 @@ if __name__ == "__main__":
traceback.print_exc()
raise Exception
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to install additional libraries.", str(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
index 2580279..b52df61 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_list_libs.py
@@ -21,7 +21,7 @@
#
# ******************************************************************************
-import logging
+from datalab.logger import logging
import os
import sys
import traceback
@@ -33,16 +33,8 @@ from fabric import *
if __name__ == "__main__":
instance_class = 'notebook'
- local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
- os.environ['request_id'])
- local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
- logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
- level=logging.DEBUG,
- filename=local_log_filepath)
-
try:
logging.info('[GETTING AVAILABLE PACKAGES]')
- print('[GETTING AVAILABLE PACKAGES]')
data_engine = dict()
try:
data_engine['os_user'] = os.environ['conf_os_user']
@@ -55,7 +47,7 @@ if __name__ == "__main__":
data_engine['tag_name'], data_engine['master_node_name'])
data_engine['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to get parameter.", str(err))
sys.exit(1)
params = "--os_user {} --instance_ip {} --keyfile '{}' --group {}" \
@@ -67,6 +59,6 @@ if __name__ == "__main__":
traceback.print_exc()
raise Exception
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to get available libraries.", str(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py b/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
index 41e07b4..08a56a6 100644
--- a/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/dataengine_reconfigure_spark.py
@@ -22,7 +22,7 @@
# ******************************************************************************
import json
-import logging
+from datalab.logger import logging
import multiprocessing
import os
import sys
@@ -43,22 +43,14 @@ def install_libs_on_slaves(slave, data_engine):
# Run script to install additional libs
subprocess.run("~/scripts/{}.py {}".format('reconfigure_spark', params), shell=True, check=True)
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
if __name__ == "__main__":
instance_class = 'notebook'
- local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
- os.environ['request_id'])
- local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
- logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
- level=logging.DEBUG,
- filename=local_log_filepath)
-
try:
logging.info('[RECONFIGURING SPARK ON DATAENGINE]')
- print('[RECONFIGURING SPARK ON DATAENGINE]')
data_engine = dict()
try:
data_engine['os_user'] = os.environ['conf_os_user']
@@ -111,7 +103,7 @@ if __name__ == "__main__":
raise Exception
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to reconfigure Spark.", str(err))
sys.exit(1)
@@ -119,8 +111,8 @@ if __name__ == "__main__":
with open("/root/result.json", 'w') as result:
res = {"service_base_name": data_engine['service_base_name'],
"Action": "Reconfigure Spark on Data Engine"}
- print(json.dumps(res))
+ logging.info(json.dumps(res))
result.write(json.dumps(res))
except:
- print("Failed writing results.")
+ logging.error("Failed writing results.")
sys.exit(0)
diff --git a/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py b/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
index 8e33b20..9d25eb7 100644
--- a/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
@@ -28,6 +28,7 @@ import time
import xmlrpc.client
from datalab.fab import *
from datalab.notebook_lib import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -53,7 +54,7 @@ def get_available_pip_pkgs(version):
time.sleep(5)
continue
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
@@ -70,7 +71,7 @@ def get_uncategorised_pip_pkgs(all_pkgs_pip2, all_pkgs_pip3):
pip_pkgs[pkg] = "N/A"
return pip_pkgs
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py b/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
index c3661e4..f18f0cb 100644
--- a/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
@@ -27,6 +27,7 @@ import json
import sys
from datalab.fab import *
from datalab.notebook_lib import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -42,7 +43,7 @@ if __name__ == "__main__":
global conn
conn = datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
- print('Installing libraries: {}'.format(args.libs))
+ logging.info('Installing libraries: {}'.format(args.libs))
general_status = list()
data = ast.literal_eval(args.libs)
pkgs = {"libraries": {}}
@@ -59,40 +60,40 @@ if __name__ == "__main__":
pkgs['libraries'][data[row]['group']].append(
[data[row]['name'], data[row]['version']])
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to parse libs list.", str(err))
sys.exit(1)
try:
- print('Installing os packages: {}'.format(pkgs['libraries']['os_pkg']))
+ logging.info('Installing os packages: {}'.format(pkgs['libraries']['os_pkg']))
status = install_os_pkg(pkgs['libraries']['os_pkg'])
general_status = general_status + status
except KeyError:
pass
try:
- print('Installing java dependencies: {}'.format(pkgs['libraries']['java']))
+ logging.info('Installing java dependencies: {}'.format(pkgs['libraries']['java']))
status = install_java_pkg(pkgs['libraries']['java'])
general_status = general_status + status
except KeyError:
pass
#try:
- #print('Installing pip2 packages: {}'.format(pkgs['libraries']['pip2']))
+ #logging.info('Installing pip2 packages: {}'.format(pkgs['libraries']['pip2']))
#status = install_pip_pkg(pkgs['libraries']['pip2'], 'pip2', 'pip2', args.dataengine_service)
#general_status = general_status + status
#except KeyError:
#pass
try:
- print('Installing pip3 packages: {}'.format(pkgs['libraries']['pip3']))
+ logging.info('Installing pip3 packages: {}'.format(pkgs['libraries']['pip3']))
status = install_pip_pkg(pkgs['libraries']['pip3'], 'pip3', 'pip3', args.dataengine_service)
general_status = general_status + status
except KeyError:
pass
try:
- print('Installing other packages (only tries pip3): {}'.format(pkgs['libraries']['others']))
+ logging.info('Installing other packages (only tries pip3): {}'.format(pkgs['libraries']['others']))
for pkg in pkgs['libraries']['others']:
status_pip3 = install_pip_pkg([pkg], 'pip3', 'others', args.dataengine_service)
general_status = general_status + status_pip3
@@ -103,7 +104,7 @@ if __name__ == "__main__":
and os.environ['notebook_r_enabled'] == 'true')\
or os.environ['application'] in ('rstudio', 'tensor-rstudio'):
try:
- print('Installing R packages: {}'.format(pkgs['libraries']['r_pkg']))
+ logging.info('Installing R packages: {}'.format(pkgs['libraries']['r_pkg']))
status = install_r_pkg(pkgs['libraries']['r_pkg'])
general_status = general_status + status
except KeyError:
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
index 8fbc014..560ba1d 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
@@ -25,6 +25,7 @@ import argparse
import os
from datalab.fab import *
from datalab.meta_lib import *
+from datalab.logger import logging
from fabric import *
from patchwork.files import exists
from patchwork import files
@@ -107,7 +108,7 @@ def install_sparkamagic_kernels(args):
spark_master_ip, args.os_user))
datalab.fab.conn.sudo('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user))
except Exception as err:
- print(err)
+ logging.error(err)
sys.exit(1)
def create_inactivity_log(master_ip):
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py b/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
index a7e50b1..c619e81 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
@@ -25,6 +25,7 @@ import sys
import argparse
from datalab.fab import *
from datalab.notebook_lib import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -48,13 +49,13 @@ def start_jupyterlab_container(jupyterlab_dir):
except: sys.exit(1)
if __name__ == "__main__":
- print("Configure connections")
+ logging.info("Configure connections")
global conn
conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
- print("Starting Jupyter container")
+ logging.info("Starting Jupyter container")
try:
start_jupyterlab_container(jupyterlab_dir)
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py b/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
index e8be6fb..6955f07 100644
--- a/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
+++ b/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
@@ -28,6 +28,7 @@ import sys
from datalab.actions_lib import *
from datalab.fab import *
from datalab.notebook_lib import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -52,14 +53,14 @@ if __name__ == "__main__":
conn.run('rm .gitcreds')
git_creds = os.environ['git_creds']
except KeyError as err:
- print('Error: {0}'.format(err))
- print("Parameter git_creds does not exist. Skipping.")
+ logging.error('Error: {0}'.format(err))
+ logging.error("Parameter git_creds does not exist. Skipping.")
sys.exit(0)
try:
data = ast.literal_eval(git_creds)
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to parse git credentials.", str(err))
sys.exit(1)
@@ -83,7 +84,7 @@ if __name__ == "__main__":
conn.put('new_gitcreds', '/home/{}/.gitcreds'.format(args.os_user))
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to add host/login/(password/token) to config.", str(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py b/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
index bb2b974..fdfe87f 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_git_creds.py
@@ -21,7 +21,7 @@
#
# ******************************************************************************
-import logging
+from datalab.logger import logging
import os
import sys
import traceback
@@ -33,16 +33,8 @@ from fabric import *
if __name__ == "__main__":
instance_class = 'notebook'
- local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
- os.environ['request_id'])
- local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
- logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
- level=logging.DEBUG,
- filename=local_log_filepath)
-
try:
logging.info('[SETUP USER GIT CREDENTIALS]')
- print('[SETUP USER GIT CREDENTIALS]')
notebook_config = dict()
notebook_config['notebook_name'] = os.environ['notebook_instance_name']
notebook_config['os_user'] = os.environ['conf_os_user']
@@ -61,7 +53,7 @@ if __name__ == "__main__":
traceback.print_exc()
raise Exception
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to manage git credentials.", str(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py b/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
index 6784b81..c70593b 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
@@ -28,18 +28,11 @@ import subprocess
from datalab.actions_lib import *
from datalab.fab import *
from datalab.meta_lib import *
+from datalab.logger import logging
if __name__ == "__main__":
- local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
- os.environ['request_id'])
- local_log_filepath = "/logs/project/" + local_log_filename
- logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
- level=logging.DEBUG,
- filename=local_log_filepath)
-
try:
logging.info('[ASK INACTIVITY STATUS]')
- print('[ASK INACTIVITY STATUS]')
notebook_config = dict()
try:
notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -57,7 +50,7 @@ if __name__ == "__main__":
else:
notebook_config['dataengine_ip'] = '0.0.0.0'
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to get parameter.", str(err))
sys.exit(1)
params = "--os_user {0} --instance_ip {1} --keyfile '{2}' --resource_type {3} --dataengine_ip {4}" \
@@ -69,6 +62,6 @@ if __name__ == "__main__":
traceback.print_exc()
raise Exception
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to ask inactivity status.", str(err))
sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py b/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
index 50b9609..b894449 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_install_libs.py
@@ -21,7 +21,7 @@
#
# ******************************************************************************
-import logging
+from datalab.logger import logging
import os
import sys
import traceback
@@ -33,16 +33,8 @@ from fabric import *
if __name__ == "__main__":
instance_class = 'notebook'
- local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
- os.environ['request_id'])
- local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
- logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
- level=logging.DEBUG,
- filename=local_log_filepath)
-
try:
logging.info('[INSTALLING ADDITIONAL LIBRARIES ON NOTEBOOK INSTANCE]')
- print('[INSTALLING ADDITIONAL LIBRARIES ON NOTEBOOK INSTANCE]')
notebook_config = dict()
try:
notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -54,7 +46,7 @@ if __name__ == "__main__":
notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
notebook_config['libs'] = os.environ['libs']
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to get parameter.", str(err))
sys.exit(1)
params = '--os_user {} --instance_ip {} --keyfile "{}" --libs "{}"' \
@@ -67,6 +59,6 @@ if __name__ == "__main__":
traceback.print_exc()
raise Exception
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to install additional libraries.", str(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py b/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
index e6e989e..4cf4215 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_list_libs.py
@@ -21,7 +21,7 @@
#
# ******************************************************************************
-import logging
+from datalab.logger import logging
import os
import sys
import traceback
@@ -33,16 +33,8 @@ from fabric import *
if __name__ == "__main__":
instance_class = 'notebook'
- local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
- os.environ['request_id'])
- local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
- logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
- level=logging.DEBUG,
- filename=local_log_filepath)
-
try:
logging.info('[GETTING AVAILABLE PACKAGES]')
- print('[GETTING AVAILABLE PACKAGES]')
notebook_config = dict()
try:
notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -54,7 +46,7 @@ if __name__ == "__main__":
notebook_config['tag_name'], notebook_config['notebook_name'])
notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to get parameter.", str(err))
sys.exit(1)
params = "--os_user {} --instance_ip {} --keyfile '{}' --group {}" \
@@ -66,6 +58,6 @@ if __name__ == "__main__":
traceback.print_exc()
raise Exception
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to get available libraries.", str(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py b/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
index 596d4d8..fa98c77 100644
--- a/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_reconfigure_spark.py
@@ -22,7 +22,7 @@
# ******************************************************************************
import json
-import logging
+from datalab.logger import logging
import os
import sys
import traceback
@@ -34,16 +34,8 @@ from fabric import *
if __name__ == "__main__":
instance_class = 'notebook'
- local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['project_name'],
- os.environ['request_id'])
- local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
- logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
- level=logging.DEBUG,
- filename=local_log_filepath)
-
try:
logging.info('[RECONFIGURING SPARK]')
- print('[RECONFIGURING SPARK]')
notebook_config = dict()
try:
notebook_config['notebook_name'] = os.environ['notebook_instance_name']
@@ -54,7 +46,7 @@ if __name__ == "__main__":
notebook_config['tag_name'], notebook_config['notebook_name'])
notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to get parameter.", str(err))
sys.exit(1)
params = "--os_user {} --instance_ip {} --keyfile '{}' --resource_type notebook " \
@@ -66,7 +58,7 @@ if __name__ == "__main__":
traceback.print_exc()
raise Exception
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
append_result("Failed to reconfigure Spark.", str(err))
sys.exit(1)
@@ -75,8 +67,8 @@ if __name__ == "__main__":
with open("/root/result.json", 'w') as result:
res = {"service_base_name": notebook_config['service_base_name'],
"Action": "Reconfigure Spark on Notebook"}
- print(json.dumps(res))
+ logging.info(json.dumps(res))
result.write(json.dumps(res))
except:
- print("Failed writing results.")
+ logging.error("Failed writing results.")
sys.exit(0)
diff --git a/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
index c7e0017..58f07af 100644
--- a/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
@@ -29,6 +29,7 @@ from datalab.actions_lib import *
from datalab.common_lib import *
from datalab.fab import *
from datalab.notebook_lib import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -64,7 +65,7 @@ def configure_rstudio():
subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured', shell=True, check=True)
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
else:
try:
@@ -78,7 +79,7 @@ def configure_rstudio():
args.os_user + '/.Rprofile', shell=True, check=True)
subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/superset_start.py b/infrastructure-provisioning/src/general/scripts/os/superset_start.py
index 9f976ae..88fb46f 100644
--- a/infrastructure-provisioning/src/general/scripts/os/superset_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/superset_start.py
@@ -25,6 +25,7 @@ import argparse
import sys
from datalab.fab import *
from datalab.notebook_lib import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -47,14 +48,14 @@ def start_superset(superset_dir):
except: sys.exit(1)
if __name__ == "__main__":
- print("Configure connections")
+ logging.info("Configure connections")
global conn
conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
- print("Starting Superset")
+ logging.info("Starting Superset")
try:
start_superset(superset_dir)
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
conn.close()
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
index c8965b6..3255bb0 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
@@ -28,6 +28,7 @@ from datalab.actions_lib import *
from datalab.common_lib import *
from datalab.fab import *
from datalab.notebook_lib import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -63,7 +64,7 @@ def configure_rstudio():
subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
subprocess.run('touch /home/' + args.os_user + '/.ensure_dir/rstudio_dataengine_ensured', shell=True, check=True)
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
else:
try:
@@ -77,7 +78,7 @@ def configure_rstudio():
args.os_user + '/.Rprofile', shell=True, check=True)
subprocess.run('''R -e "source('/home/{}/.Rprofile')"'''.format(args.os_user), shell=True, check=True)
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
index e6d27aa..c155e42 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
@@ -25,6 +25,7 @@ import argparse
import os
from datalab.fab import *
from datalab.meta_lib import *
+from datalab.logger import logging
from fabric import *
from patchwork.files import exists
from patchwork import files
@@ -100,7 +101,7 @@ def install_sparkamagic_kernels(args):
spark_master_ip, args.os_user))
datalab.fab.conn.sudo('sudo chown -R {0}:{0} /home/{0}/.sparkmagic/'.format(args.os_user))
except Exception as err:
- print(err)
+ logging.error(err)
sys.exit(1)
def create_inactivity_log(master_ip, hoststring):
diff --git a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
index 7f424eb..52cf241 100644
--- a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
@@ -29,6 +29,7 @@ from datalab.actions_lib import *
from datalab.common_lib import *
from datalab.fab import *
from datalab.notebook_lib import *
+from datalab.logger import logging
from fabric import *
parser = argparse.ArgumentParser()
@@ -146,7 +147,7 @@ def configure_zeppelin_dataengine_interpreter(cluster_name, cluster_dir, os_user
subprocess.run('sleep 5', shell=True, check=True)
subprocess.run('touch /home/' + os_user + '/.ensure_dir/dataengine_' + cluster_name + '_interpreter_ensured', shell=True, check=True)
except Exception as err:
- print('Error: {0}'.format(err))
+ logging.error('Error: {0}'.format(err))
sys.exit(1)
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org