You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@datalab.apache.org by lf...@apache.org on 2021/02/17 09:14:00 UTC
[incubator-datalab] 02/02: [DATALAB-2091]: changed how connection
is established
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a commit to branch DATALAB-2091
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit 0346745b621f735f257ff30b16b70be496d9e663
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Wed Feb 17 11:13:35 2021 +0200
[DATALAB-2091]: changed how connection is established
---
.../scripts/deploy_repository/deploy_repository.py | 2 +-
.../src/base/scripts/install_prerequisites.py | 16 ++++++++--------
.../src/base/scripts/install_user_key.py | 2 +-
.../src/dataengine/scripts/configure_dataengine.py | 2 +-
.../deeplearning/scripts/configure_deep_learning_node.py | 2 +-
.../src/edge/scripts/configure_http_proxy.py | 2 +-
.../src/edge/scripts/configure_nginx_reverse_proxy.py | 2 +-
.../src/general/lib/azure/actions_lib.py | 4 ++--
.../src/general/lib/os/debian/common_lib.py | 6 +++---
infrastructure-provisioning/src/general/lib/os/fab.py | 4 ++--
.../src/general/lib/os/redhat/common_lib.py | 6 +++---
.../general/scripts/aws/common_download_git_certfile.py | 2 +-
.../general/scripts/aws/common_remove_remote_kernels.py | 2 +-
.../general/scripts/aws/dataengine-service_configure.py | 2 +-
.../scripts/azure/common_download_git_certfile.py | 2 +-
.../scripts/azure/common_remove_remote_kernels.py | 2 +-
.../src/general/scripts/azure/rstudio_change_pass.py | 2 +-
.../general/scripts/gcp/common_download_git_certfile.py | 2 +-
.../general/scripts/gcp/dataengine-service_configure.py | 2 +-
.../src/general/scripts/os/check_inactivity.py | 2 +-
.../src/general/scripts/os/common_clean_instance.py | 2 +-
.../src/general/scripts/os/common_configure_proxy.py | 2 +-
.../general/scripts/os/common_configure_reverse_proxy.py | 2 +-
.../src/general/scripts/os/common_configure_spark.py | 2 +-
.../src/general/scripts/os/configure_proxy_for_docker.py | 2 +-
.../src/general/scripts/os/get_list_available_pkgs.py | 2 +-
.../src/general/scripts/os/install_additional_libs.py | 2 +-
.../src/general/scripts/os/jupyterlab_container_start.py | 2 +-
.../src/general/scripts/os/manage_git_creds.py | 2 +-
.../src/general/scripts/os/reconfigure_spark.py | 2 +-
.../src/general/scripts/os/superset_start.py | 2 +-
.../src/general/scripts/os/update_inactivity_on_start.py | 2 +-
.../src/jupyter/scripts/configure_jupyter_node.py | 2 +-
.../src/jupyterlab/scripts/configure_jupyterlab_node.py | 2 +-
.../src/project/scripts/configure_http_proxy.py | 2 +-
.../src/project/scripts/configure_nftables.py | 2 +-
.../src/project/scripts/configure_nginx_reverse_proxy.py | 2 +-
.../src/rstudio/scripts/configure_rstudio_node.py | 2 +-
.../src/ssn/scripts/configure_docker.py | 2 +-
.../src/ssn/scripts/configure_gitlab.py | 4 ++--
.../src/ssn/scripts/configure_ssn_node.py | 2 +-
.../src/ssn/scripts/configure_ui.py | 2 +-
.../src/ssn/scripts/upload_response_file.py | 2 +-
.../src/superset/scripts/configure_superset_node.py | 2 +-
.../scripts/configure_tensor-rstudio_node.py | 2 +-
.../src/tensor/scripts/configure_tensor_node.py | 2 +-
.../src/zeppelin/scripts/configure_zeppelin_node.py | 2 +-
47 files changed, 61 insertions(+), 61 deletions(-)
diff --git a/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py b/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py
index af77ea0..d91669d 100644
--- a/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py
+++ b/infrastructure-provisioning/scripts/deploy_repository/deploy_repository.py
@@ -1690,7 +1690,7 @@ if __name__ == "__main__":
sys.exit(1)
print("CONFIGURE CONNECTIONS")
- datalab.fab.init_datalab_connection(ec2_ip_address, 'ubuntu', key_filename)
+ conn = datalab.fab.init_datalab_connection(ec2_ip_address, 'ubuntu', key_filename)
print("CONFIGURE LOCAL REPOSITORY")
try:
print('CREATING DATALAB USER')
diff --git a/infrastructure-provisioning/src/base/scripts/install_prerequisites.py b/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
index bc49ce8..d79d1d2 100644
--- a/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
+++ b/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
@@ -41,7 +41,7 @@ parser.add_argument('--region', type=str, default='')
args = parser.parse_args()
-def create_china_pip_conf_file():
+def create_china_pip_conf_file(conn):
if not exists('/home/{}/pip_china_ensured'.format(args.user)):
conn.sudo('touch /etc/pip.conf')
conn.sudo('echo "[global]" >> /etc/pip.conf')
@@ -53,23 +53,23 @@ def create_china_pip_conf_file():
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
deeper_config = json.loads(args.additional_config)
if args.region == 'cn-north-1':
- change_pkg_repos()
- create_china_pip_conf_file()
+ change_pkg_repos(conn)
+ create_china_pip_conf_file(conn)
print("Updating hosts file")
- update_hosts_file(args.user)
+ update_hosts_file(args.user, conn)
print("Updating repositories and installing requested tools.")
- ensure_pkg(args.user)
+ ensure_pkg(args.user, conn=conn)
print("Installing python packages: {}".format(args.pip_packages))
- ensure_pip(args.pip_packages)
+ ensure_pip(args.pip_packages, conn)
print("Installing NTPd")
- ensure_ntpd(args.user, args.edge_private_ip)
+ ensure_ntpd(args.user, args.edge_private_ip, conn)
datalab.fab.close_connection()
diff --git a/infrastructure-provisioning/src/base/scripts/install_user_key.py b/infrastructure-provisioning/src/base/scripts/install_user_key.py
index acfb3bb..f0ba0d9 100644
--- a/infrastructure-provisioning/src/base/scripts/install_user_key.py
+++ b/infrastructure-provisioning/src/base/scripts/install_user_key.py
@@ -60,7 +60,7 @@ def copy_key(config):
if __name__ == "__main__":
print("Configure connections")
try:
- datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
deeper_config = json.loads(args.additional_config)
except:
print('Fail connection')
diff --git a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
index 7c8a5dc..4f5cdcc 100644
--- a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
+++ b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
@@ -117,7 +117,7 @@ def start_spark(os_user, master_ip, node):
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
# PREPARE DISK
print("Prepare .ensure directory")
diff --git a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
index 9e96083..b4cd5c6 100644
--- a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
+++ b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
@@ -93,7 +93,7 @@ def install_itorch(os_user):
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
# PREPARE DISK
print("Prepare .ensure directory")
diff --git a/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py b/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
index 6765d59..e48b39f 100644
--- a/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
+++ b/infrastructure-provisioning/src/edge/scripts/configure_http_proxy.py
@@ -41,7 +41,7 @@ args = parser.parse_args()
if __name__ == "__main__":
print("Configure connections")
try:
- datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
deeper_config = json.loads(args.additional_config)
except:
sys.exit(2)
diff --git a/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py b/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
index 9658d74..f464ce2 100644
--- a/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
+++ b/infrastructure-provisioning/src/edge/scripts/configure_nginx_reverse_proxy.py
@@ -46,7 +46,7 @@ if __name__ == "__main__":
print("Configure connections")
try:
- datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
except Exception as err:
print("Failed establish connection. Excpeption: " + str(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index f0d6c67..ae81399 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -1222,7 +1222,7 @@ def configure_dataengine_spark(cluster_name, jars_dir, cluster_dir, datalake_ena
def remount_azure_disk(creds=False, os_user='', hostname='', keyfile=''):
if creds:
- datalab.fab.init_datalab_connection(hostname, os_user, keyfile)
+ conn = datalab.fab.init_datalab_connection(hostname, os_user, keyfile)
conn.sudo('sed -i "/azure_resource-part1/ s|/mnt|/media|g" /etc/fstab')
conn.sudo('grep "azure_resource-part1" /etc/fstab > /dev/null && umount -f /mnt/ || true')
conn.sudo('mount -a')
@@ -1232,7 +1232,7 @@ def remount_azure_disk(creds=False, os_user='', hostname='', keyfile=''):
def prepare_vm_for_image(creds=False, os_user='', hostname='', keyfile=''):
if creds:
- datalab.fab.init_datalab_connection(hostname, os_user, keyfile)
+ conn = datalab.fab.init_datalab_connection(hostname, os_user, keyfile)
conn.sudo('waagent -deprovision -force')
if creds:
datalab.fab.close_connection()
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
index ec33468..fbf2e1b 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
@@ -99,7 +99,7 @@ def manage_pkg(command, environment, requisites):
def ensure_pkg(user, requisites='linux-headers-generic python3-pip python3-dev python3-virtualenv '
'groff gcc vim less git wget '
'libssl-dev unattended-upgrades nmap '
- 'libffi-dev unzip libxml2-dev haveged'):
'libffi-dev unzip libxml2-dev haveged', conn=None):
try:
if not exists('/home/{}/.ensure_dir/pkg_upgraded'.format(user)):
count = 0
@@ -141,7 +141,7 @@ def renew_gpg_key():
sys.exit(1)
-def change_pkg_repos():
+def change_pkg_repos(conn):
if not exists('/tmp/pkg_china_ensured'):
conn.put('/root/files/sources.list', '/tmp/sources.list')
conn.sudo('mv /tmp/sources.list /etc/apt/sources.list')
@@ -159,7 +159,7 @@ def find_java_path_local():
return java_path
-def ensure_ntpd(user, edge_private_ip=''):
+def ensure_ntpd(user, edge_private_ip='', conn=None):
try:
if not exists('/home/{}/.ensure_dir/ntpd_ensured'.format(user)):
conn.sudo('timedatectl set-ntp no')
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index fd34ce9..f81390c 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -38,7 +38,7 @@ from fabric import *
from patchwork.files import exists
-def ensure_pip(requisites):
+def ensure_pip(requisites, conn):
try:
if not exists('/home/{}/.ensure_dir/pip_path_added'.format(os.environ['conf_os_user'])):
conn.sudo('echo PATH=$PATH:/usr/local/bin/:/opt/spark/bin/ >> /etc/profile')
@@ -912,7 +912,7 @@ def update_zeppelin_interpreters(multiple_clusters, r_enabled, interpreter_mode=
sys.exit(1)
-def update_hosts_file(os_user):
+def update_hosts_file(os_user, conn):
try:
if not exists('/home/{}/.ensure_dir/hosts_file_updated'.format(os_user)):
conn.sudo('sed -i "s/^127.0.0.1 localhost/127.0.0.1 localhost localhost.localdomain/g" /etc/hosts')
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
index bd3a115..a9f2d6d 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/common_lib.py
@@ -56,7 +56,7 @@ def manage_pkg(command, environment, requisites):
except:
sys.exit(1)
-def ensure_pkg(user, requisites='git vim gcc python-devel openssl-devel nmap libffi libffi-devel unzip libxml2-devel'):
+def ensure_pkg(user, requisites='git vim gcc python-devel openssl-devel nmap libffi libffi-devel unzip libxml2-devel', conn=None):
try:
if not exists('/home/{}/.ensure_dir/pkg_upgraded'.format(user)):
print("Updating repositories and installing requested tools: {}".format(requisites))
@@ -88,7 +88,7 @@ def ensure_pkg(user, requisites='git vim gcc python-devel openssl-devel nmap lib
sys.exit(1)
-def change_pkg_repos():
+def change_pkg_repos(conn):
if not exists('/tmp/pkg_china_ensured'):
conn.put('/root/files/sources.list', '/tmp/sources.list')
conn.sudo('mv /tmp/sources.list /etc/yum.repos.d/CentOS-Base-aliyun.repo')
@@ -105,7 +105,7 @@ def find_java_path_local():
return java_path
-def ensure_ntpd(user, edge_private_ip=''):
+def ensure_ntpd(user, edge_private_ip='', conn=None):
try:
if not exists('/home/{}/.ensure_dir/ntpd_ensured'.format(user)):
conn.sudo('systemctl disable chronyd')
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
index 8297550..dfcff67 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_download_git_certfile.py
@@ -34,7 +34,7 @@ args = parser.parse_args()
if __name__ == "__main__":
create_aws_config_files()
- datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
service_base_name = os.environ['conf_service_base_name'] = replace_multi_symbols(
os.environ['conf_service_base_name'][:20], '-', True)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py
index c52c919..9a5eed2 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_remove_remote_kernels.py
@@ -38,7 +38,7 @@ args = parser.parse_args()
if __name__ == "__main__":
print('Configure connections')
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
try:
de_clusters, des_clusters = find_cluster_kernels()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
index d1a080f..45b0d28 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_configure.py
@@ -82,7 +82,7 @@ def configure_dataengine_service(instance, emr_conf):
try:
datalab.fab.configure_data_engine_service_pip(emr_conf['instance_ip'], emr_conf['os_user'],
emr_conf['key_path'], True)
- datalab.fab.init_datalab_connection(emr_conf['instance_ip'], emr_conf['os_user'], emr_conf['key_path'])
+ conn = datalab.fab.init_datalab_connection(emr_conf['instance_ip'], emr_conf['os_user'], emr_conf['key_path'])
conn.sudo('echo "[main]" > /etc/yum/pluginconf.d/priorities.conf ; echo "enabled = 0" >> '
'/etc/yum/pluginconf.d/priorities.conf')
manage_pkg('-y install', 'remote', 'R-devel')
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
index 8dae105..8841217 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_download_git_certfile.py
@@ -40,7 +40,7 @@ container_name = ('{}-ssn-bucket'.format(os.environ['conf_service_base_name'])).
gitlab_certfile = os.environ['conf_gitlab_certfile']
if __name__ == "__main__":
- datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
for storage_account in AzureMeta().list_storage_accounts(resource_group_name):
if ssn_storage_account_tag == storage_account.tags["Name"]:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py b/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py
index e68628b..6e91b95 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_remove_remote_kernels.py
@@ -38,7 +38,7 @@ args = parser.parse_args()
if __name__ == "__main__":
print('Configure connections')
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
try:
de_clusters, des_clusters = find_cluster_kernels()
diff --git a/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py b/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
index 42a7f98..a9cd20f 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/rstudio_change_pass.py
@@ -35,7 +35,7 @@ args = parser.parse_args()
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
print("Setting password for Rstudio user.")
try:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py b/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
index 8d43156..f1b5f16 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_download_git_certfile.py
@@ -33,7 +33,7 @@ parser.add_argument('--os_user', type=str, default='')
args = parser.parse_args()
if __name__ == "__main__":
- datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
bucket_name = ('{0}-{1}-{2}-bucket'.format(os.environ['conf_service_base_name'], os.environ['project_name'],
os.environ['endpoint_name'])).lower().replace('_', '-')
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
index 34d0e8b..2fa8504 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_configure.py
@@ -59,7 +59,7 @@ def configure_dataengine_service(instance, dataproc_conf):
logging.info('[CONFIGURE DATAENGINE SERVICE]')
print('[CONFIGURE DATAENGINE SERVICE]')
try:
- datalab.fab.init_datalab_connection(dataproc_conf['instance_ip'], dataproc_conf['datalab_ssh_user'], dataproc_conf['key_path'])
+ conn = datalab.fab.init_datalab_connection(dataproc_conf['instance_ip'], dataproc_conf['datalab_ssh_user'], dataproc_conf['key_path'])
datalab.notebook_lib.install_os_pkg([['python3-pip', 'N/A']])
datalab.fab.configure_data_engine_service_pip(dataproc_conf['instance_ip'],
dataproc_conf['datalab_ssh_user'],
diff --git a/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py b/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py
index 29d0043..012c433 100644
--- a/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py
+++ b/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py
@@ -38,7 +38,7 @@ args = parser.parse_args()
if __name__ == "__main__":
- datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
inactivity_dir = '/opt/inactivity/'
if args.resource_type == 'dataengine':
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
index a185daa..eda8a82 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
@@ -117,7 +117,7 @@ def clean_tensor_rstudio():
if __name__ == "__main__":
print('Configure connections')
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
if os.environ['conf_cloud_provider'] == 'azure':
de_master_name = '{}-{}-{}-de-{}-m'.format(
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
index ab1b53c..d122467 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_proxy.py
@@ -40,7 +40,7 @@ args = parser.parse_args()
##############
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
deeper_config = json.loads(args.additional_config)
print("Enabling proxy for notebook server for repositories access.")
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
index e11de15..ce35b03 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_reverse_proxy.py
@@ -103,7 +103,7 @@ if __name__ == "__main__":
sys.exit(1)
print("Configure connections")
- datalab.fab.init_datalab_connection(args.edge_hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.edge_hostname, args.os_user, args.keyfile)
conn.put('/tmp/{}.conf'.format(conf_file_name), '/usr/local/openresty/nginx/conf/locations', use_sudo=True)
conn.sudo('service openresty reload')
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
index 2cd1806..7e8b1d3 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_configure_spark.py
@@ -82,7 +82,7 @@ def add_custom_spark_properties(cluster_name):
if __name__ == "__main__":
print('Configure connections')
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
if (args.spark_conf != ''):
update_spark_defaults_conf(args.spark_conf)
diff --git a/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py b/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
index 6647dca..b2f1e53 100644
--- a/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
+++ b/infrastructure-provisioning/src/general/scripts/os/configure_proxy_for_docker.py
@@ -36,7 +36,7 @@ https_file = '/etc/systemd/system/docker.service.d/https-proxy.conf'
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
print("Configuring proxy for docker")
try:
conn.sudo('mkdir -p /etc/systemd/system/docker.service.d')
diff --git a/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py b/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
index 402d2ea..ac87479 100644
--- a/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/get_list_available_pkgs.py
@@ -75,7 +75,7 @@ def get_uncategorised_pip_pkgs(all_pkgs_pip2, all_pkgs_pip3):
if __name__ == "__main__":
- datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
all_pkgs = dict()
if args.group == 'os_pkg':
all_pkgs['os_pkg'] = get_available_os_pkgs()
diff --git a/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py b/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
index 89c95bf..3091202 100644
--- a/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/install_additional_libs.py
@@ -38,7 +38,7 @@ args = parser.parse_args()
if __name__ == "__main__":
- datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
print('Installing libraries: {}'.format(args.libs))
general_status = list()
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py b/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
index 592c98e..e2dd16e 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyterlab_container_start.py
@@ -50,7 +50,7 @@ def start_jupyterlab_container(jupyterlab_dir):
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
print("Starting Jupyter container")
try:
start_jupyterlab_container(jupyterlab_dir)
diff --git a/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py b/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
index 22fe55b..6938265 100644
--- a/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
+++ b/infrastructure-provisioning/src/general/scripts/os/manage_git_creds.py
@@ -37,7 +37,7 @@ parser.add_argument('--os_user', type=str, default='')
args = parser.parse_args()
if __name__ == "__main__":
- datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
gitlab_certfile = os.environ['conf_gitlab_certfile']
if exists('/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
diff --git a/infrastructure-provisioning/src/general/scripts/os/reconfigure_spark.py b/infrastructure-provisioning/src/general/scripts/os/reconfigure_spark.py
index cacd00f..73ce8af 100644
--- a/infrastructure-provisioning/src/general/scripts/os/reconfigure_spark.py
+++ b/infrastructure-provisioning/src/general/scripts/os/reconfigure_spark.py
@@ -39,7 +39,7 @@ args = parser.parse_args()
if __name__ == "__main__":
- datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.instance_ip, args.os_user, args.keyfile)
jars_dir = '/opt/jars/'
templates_dir = '/root/templates/'
diff --git a/infrastructure-provisioning/src/general/scripts/os/superset_start.py b/infrastructure-provisioning/src/general/scripts/os/superset_start.py
index e2e974d..c32614c 100644
--- a/infrastructure-provisioning/src/general/scripts/os/superset_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/superset_start.py
@@ -49,7 +49,7 @@ def start_superset(superset_dir):
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
print("Starting Superset")
try:
start_superset(superset_dir)
diff --git a/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py b/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py
index 99ad83c..490492e 100644
--- a/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py
+++ b/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py
@@ -36,7 +36,7 @@ args = parser.parse_args()
if __name__ == "__main__":
- datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.notebook_ip, args.os_user, args.keyfile)
if args.cluster_ip == "none":
kernel = 'local'
diff --git a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
index 954a6ec..e28d6d6 100644
--- a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
+++ b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
@@ -72,7 +72,7 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
##############
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
# PREPARE DISK
print("Prepare .ensure directory")
diff --git a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
index 4b6e8ec..86f907f 100644
--- a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
+++ b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
@@ -80,7 +80,7 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
##############
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
# PREPARE DISK
print("Prepare .ensure directory")
diff --git a/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py b/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py
index 4a27cfb..295c77a 100644
--- a/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py
+++ b/infrastructure-provisioning/src/project/scripts/configure_http_proxy.py
@@ -40,7 +40,7 @@ args = parser.parse_args()
if __name__ == "__main__":
print("Configure connections")
try:
- datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
deeper_config = json.loads(args.additional_config)
except:
sys.exit(2)
diff --git a/infrastructure-provisioning/src/project/scripts/configure_nftables.py b/infrastructure-provisioning/src/project/scripts/configure_nftables.py
index 53ff094..cd4e35f 100644
--- a/infrastructure-provisioning/src/project/scripts/configure_nftables.py
+++ b/infrastructure-provisioning/src/project/scripts/configure_nftables.py
@@ -40,7 +40,7 @@ args = parser.parse_args()
if __name__ == "__main__":
print("Configure connections")
try:
- datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
deeper_config = json.loads(args.additional_config)
except:
sys.exit(2)
diff --git a/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py b/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py
index 5601120..bf4ab58 100644
--- a/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py
+++ b/infrastructure-provisioning/src/project/scripts/configure_nginx_reverse_proxy.py
@@ -49,7 +49,7 @@ if __name__ == "__main__":
print("Configure connections")
try:
- datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.user, args.keyfile)
except Exception as err:
print("Failed establish connection. Excpeption: " + str(err))
sys.exit(1)
diff --git a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
index d151708..e401eae 100644
--- a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
+++ b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
@@ -66,7 +66,7 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
##############
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
# PREPARE DISK
print("Prepare .ensure directory")
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
index 3ced3ef..c4fe770 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
@@ -189,7 +189,7 @@ def configure_guacamole():
if __name__ == "__main__":
print("Configure connections")
try:
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
deeper_config = json.loads(args.additional_config)
except:
sys.exit(2)
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
index 70643c3..d5796f2 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_gitlab.py
@@ -46,7 +46,7 @@ def create_user(os_user):
initial_user = 'ec2-user'
sudo_group = 'wheel'
- datalab.fab.init_datalab_connection(args.instance_ip, initial_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.instance_ip, initial_user, args.keyfile)
try:
conn.sudo('useradd -m -G {1} -s /bin/bash {0}'.format(os_user, sudo_group))
@@ -185,7 +185,7 @@ def summary():
if __name__ == "__main__":
create_user(os.environ['conf_os_user'])
- datalab.fab.init_datalab_connection(args.instance_ip, os.environ['conf_os_user'], args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.instance_ip, os.environ['conf_os_user'], args.keyfile)
prepare_config()
install_gitlab()
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py b/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
index d333534..55459b4 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
@@ -213,7 +213,7 @@ def docker_build_script():
if __name__ == "__main__":
print("Configure connections")
try:
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
host_string = args.os_user + '@' + args.hostname
deeper_config = json.loads(args.additional_config)
except:
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
index ec9faf0..68b3fec 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
@@ -204,7 +204,7 @@ def build_ui():
if __name__ == "__main__":
print("Configure connections")
try:
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
host_string = args.os_user + '@' + args.hostname
deeper_config = json.loads(args.additional_config)
except:
diff --git a/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py b/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
index 93cfbd4..53d322c 100644
--- a/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
+++ b/infrastructure-provisioning/src/ssn/scripts/upload_response_file.py
@@ -38,7 +38,7 @@ args = parser.parse_args()
def upload_response_file(instance_name, local_log_filepath, os_user):
print('Connect to SSN instance with hostname: {0} and name: {1}'.format(args.instance_hostname, instance_name))
pkey = "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
- datalab.fab.init_datalab_connection(args.instance_hostname, os_user, pkey)
+ conn = datalab.fab.init_datalab_connection(args.instance_hostname, os_user, pkey)
try:
conn.put('/root/result.json', '/home/{}/{}.json'.format(os_user, os.environ['request_id']))
conn.sudo('mv /home/{}/{}.json {}tmp/result/'.format(os_user, os.environ['request_id'],
diff --git a/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py b/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
index b948970..1f163c9 100644
--- a/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
+++ b/infrastructure-provisioning/src/superset/scripts/configure_superset_node.py
@@ -51,7 +51,7 @@ gitlab_certfile = os.environ['conf_gitlab_certfile']
##############
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
# PREPARE DISK
print("Prepare .ensure directory")
diff --git a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
index 7a118e1..8f7f103 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
@@ -74,7 +74,7 @@ r_libs = ['R6', 'pbdZMQ', 'RCurl', 'reshape2', 'caTools={}'.format(os.environ['n
##############
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
# PREPARE DISK
print("Prepare .ensure directory")
diff --git a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
index 50b1f51..137e6d4 100644
--- a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
+++ b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
@@ -73,7 +73,7 @@ cudnn_file_name = os.environ['notebook_cudnn_file_name']
##############
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
# PREPARE DISK
print("Prepare .ensure directory")
diff --git a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
index 689de40..4a0df1b 100644
--- a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
+++ b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
@@ -199,7 +199,7 @@ def install_local_livy(args):
##############
if __name__ == "__main__":
print("Configure connections")
- datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
+ conn = datalab.fab.init_datalab_connection(args.hostname, args.os_user, args.keyfile)
deeper_config = json.loads(args.additional_config)
# PREPARE DISK
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org