You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@datalab.apache.org by lf...@apache.org on 2022/09/22 13:32:48 UTC
[incubator-datalab] branch DATALAB-2998 updated: [DATALAB-2998]: hdinsight secret write to Renviron
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a commit to branch DATALAB-2998
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
The following commit(s) were added to refs/heads/DATALAB-2998 by this push:
new 002abe608 [DATALAB-2998]: hdinsight secret write to Renviron
002abe608 is described below
commit 002abe608492b13281659ca5bfb9bdae2a8f29fa
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Thu Sep 22 16:28:18 2022 +0300
[DATALAB-2998]: hdinsight secret write to Renviron
---
.../src/general/lib/azure/actions_lib.py | 9 +++++++
.../scripts/azure/dataengine-service_configure.py | 28 ++++++++++++++++++++--
2 files changed, 35 insertions(+), 2 deletions(-)
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index 1e3c23275..9a2158ad5 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -1628,6 +1628,15 @@ def prepare_disk(os_user):
else:
ensure_right_mount_paths()
def ensure_hdinsight_secret(os_user, computational_name, cluster_password):
    """Persist the HDInsight cluster access password into the user's ~/.Renviron.

    Writes an environment entry named '<computational_name>-access-password'
    so RStudio sessions can read it via Sys.getenv() when building a Livy
    connection, then drops a marker file so the step runs only once.

    :param os_user: OS account whose .Renviron is updated
    :param computational_name: cluster name used as the env-var prefix
    :param cluster_password: HDInsight cluster access password
    Exits the process with status 1 on any failure.
    """
    if not exists(datalab.fab.conn, '/home/{}/.ensure_dir/hdinsight_secret_ensured'.format(os_user)):
        try:
            # BUG FIX: the original command was missing .format(), so the
            # literal '{}' placeholders (and the path /home/{}/.Renviron)
            # were passed to the shell and the secret was never stored.
            # NOTE(review): cluster_password is interpolated into a shell
            # command unescaped — assumes it contains no shell metacharacters;
            # TODO confirm/escape upstream.
            datalab.fab.conn.sudo('echo "{}-access-password=\"{}\"" >> /home/{}/.Renviron'.format(
                computational_name, cluster_password, os_user))
            datalab.fab.conn.sudo('touch /home/{}/.ensure_dir/hdinsight_secret_ensured'.format(os_user))
        except Exception as err:
            print('Error:', str(err))
            sys.exit(1)
def ensure_local_spark(os_user, spark_link, spark_version, hadoop_version, local_spark_path):
if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/local_spark_ensured'):
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_configure.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_configure.py
index cc0dcfa11..271169cff 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine-service_configure.py
@@ -40,6 +40,14 @@ parser.add_argument('--uuid', type=str, default='')
parser.add_argument('--access_password', type=str, default='')
args = parser.parse_args()
+
def add_notebook_secret(resource_group_name, instance_name, os_user, keyfile, computational_name, cluster_password):
    """Connect to the notebook instance over SSH and store the HDInsight
    cluster access password in the notebook user's environment.

    Resolves the notebook's private IP, opens a datalab fabric connection
    (published via the module-level ``conn``), and delegates the actual
    write to AzureActions.ensure_hdinsight_secret.
    """
    global conn
    notebook_ip = AzureMeta.get_private_ip_address(resource_group_name, instance_name)
    conn = datalab.fab.init_datalab_connection(notebook_ip, os_user, keyfile)
    AzureActions.ensure_hdinsight_secret(os_user, computational_name, cluster_password)
+
+
if __name__ == "__main__":
try:
AzureMeta = datalab.meta_lib.AzureMeta()
@@ -56,9 +64,10 @@ if __name__ == "__main__":
hdinsight_conf['project_tag'] = hdinsight_conf['project_name']
hdinsight_conf['endpoint_name'] = os.environ['endpoint_name']
hdinsight_conf['endpoint_tag'] = hdinsight_conf['endpoint_name']
- hdinsight_conf['key_name'] = os.environ['conf_key_name']
hdinsight_conf['hdinsight_master_instance_type'] = os.environ['hdinsight_master_instance_type']
hdinsight_conf['hdinsight_slave_instance_type'] = os.environ['hdinsight_slave_instance_type']
+ hdinsight_conf['key_path'] = '{}/{}.pem'.format(os.environ['conf_key_dir'],
+ os.environ['conf_key_name'])
if 'computational_name' in os.environ:
hdinsight_conf['computational_name'] = os.environ['computational_name']
else:
@@ -71,6 +80,20 @@ if __name__ == "__main__":
hdinsight_conf['cluster_jupyter_url'] = '{}/jupyter/'.format(hdinsight_conf['cluster_url'])
hdinsight_conf['cluster_sparkhistory_url'] = '{}/sparkhistory/'.format(hdinsight_conf['cluster_url'])
hdinsight_conf['cluster_zeppelin_url'] = '{}/zeppelin/'.format(hdinsight_conf['cluster_url'])
+
+ if os.environ["application"] == "rstudio":
+ add_notebook_secret(hdinsight_conf['resource_group_name'], os.environ["notebook_instance_name"],
+ os.environ["conf_os_user"], hdinsight_conf['key_path'],
+ hdinsight_conf['computational_name'], args.access_password)
+ hdinsight_conf['rstudio_livy_connection'] = 'library(sparklyr); ' \
+ 'sc <- spark_connect(master = "{}/livy/", ' \
+ 'version = "3.1.1", method = "livy", ' \
+ 'config = livy_config(username = "{}", ' \
+ 'password = Sys.getenv("{}-access-password")))'\
+ .format(hdinsight_conf['cluster_url'], os.environ["conf_os_user"], hdinsight_conf['computational_name'])
+ else:
+ hdinsight_conf['rstudio_livy_connection'] = ''
+
logging.info('[SUMMARY]')
logging.info("Service base name: {}".format(hdinsight_conf['service_base_name']))
logging.info("Region: {}".format(hdinsight_conf['region']))
@@ -98,7 +121,8 @@ if __name__ == "__main__":
"url": hdinsight_conf['cluster_jupyter_url']},
{"description": "Zeppelin notebook",
"url": hdinsight_conf['cluster_zeppelin_url']}
- ]
+ ],
+ "Connection_string": hdinsight_conf['rstudio_livy_connection']
}
result.write(json.dumps(res))
except Exception as err:
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org