You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@datalab.apache.org by lf...@apache.org on 2021/10/07 10:29:49 UTC
[incubator-datalab] branch DATALAB-2409 updated (0ddff6e -> e885bad)
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a change to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git.
from 0ddff6e [DATALAB-2409]: added default values for arguments
new 160e3c5 [DATALAB-2409]: fixed function call
new 3fe53c9 [DATALAB-2409]: removed logging duplicate
new 2f278fb [DATALAB-2409]: fixed sed for managing /opt mount on azure
new 4461c32 [DATALAB-2409]: removed r_mirror usage
new 3033891 [DATALAB-2409]: fixed ipykernel version variable
new e885bad [DATALAB-2409]: fixed scikit_learn version variable
The 6 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails. The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
Summary of changes:
.../src/dataengine/scripts/configure_dataengine.py | 3 +-
.../scripts/configure_deep_learning_node.py | 1 -
.../src/general/conf/datalab.ini | 2 +-
.../src/general/lib/azure/actions_lib.py | 2 +-
.../src/general/lib/os/debian/notebook_lib.py | 7 ++--
.../src/general/lib/os/fab.py | 37 +++++++---------------
.../src/general/lib/os/redhat/notebook_lib.py | 7 ++--
.../general/scripts/aws/dataengine_configure.py | 8 ++---
.../general/scripts/aws/deeplearning_configure.py | 5 ++-
.../src/general/scripts/aws/edge_configure.py | 1 -
.../src/general/scripts/aws/jupyter_configure.py | 8 ++---
.../general/scripts/aws/jupyterlab_configure.py | 2 --
.../src/general/scripts/aws/rstudio_configure.py | 9 ++----
.../scripts/aws/tensor-rstudio_configure.py | 4 +--
.../src/general/scripts/aws/zeppelin_configure.py | 6 ++--
.../general/scripts/azure/dataengine_configure.py | 8 ++---
.../scripts/azure/deeplearning_configure.py | 6 ++--
.../src/general/scripts/azure/jupyter_configure.py | 8 ++---
.../general/scripts/azure/jupyterlab_configure.py | 6 ++--
.../src/general/scripts/azure/rstudio_configure.py | 4 +--
.../general/scripts/azure/zeppelin_configure.py | 6 ++--
.../general/scripts/gcp/dataengine_configure.py | 9 +++---
.../general/scripts/gcp/deeplearning_configure.py | 4 +--
.../src/general/scripts/gcp/jupyter_configure.py | 4 +--
.../general/scripts/gcp/jupyterlab_configure.py | 5 ++-
.../src/general/scripts/gcp/rstudio_configure.py | 4 +--
.../scripts/gcp/tensor-rstudio_configure.py | 4 +--
.../src/general/scripts/gcp/zeppelin_configure.py | 5 ++-
.../src/jupyter/scripts/configure_jupyter_node.py | 3 +-
.../scripts/configure_jupyterlab_node.py | 1 -
.../src/rstudio/scripts/configure_rstudio_node.py | 3 +-
.../scripts/configure_tensor-rstudio_node.py | 3 +-
.../zeppelin/scripts/configure_zeppelin_node.py | 3 +-
33 files changed, 74 insertions(+), 114 deletions(-)
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org
[incubator-datalab] 02/06: [DATALAB-2409]: removed logging duplicate
Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit 3fe53c91bc7d4d34368a3d4d21f45459dd2040f1
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Oct 4 17:18:53 2021 +0300
[DATALAB-2409]: removed logging duplicate
---
infrastructure-provisioning/src/general/scripts/aws/edge_configure.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py b/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
index e688d3c..19269e0 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_configure.py
@@ -272,7 +272,6 @@ if __name__ == "__main__":
try:
logging.info('[SUMMARY]')
- logging.info('[SUMMARY]')
logging.info("Instance name: {}".format(edge_conf['instance_name']))
logging.info("Hostname: {}".format(edge_conf['instance_hostname']))
logging.info("Public IP: {}".format(edge_conf['edge_public_ip']))
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org
[incubator-datalab] 01/06: [DATALAB-2409]: fixed function call
Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit 160e3c5f2fa63432fc77b33f68a36e297a4f7230
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Mon Oct 4 16:45:49 2021 +0300
[DATALAB-2409]: fixed function call
---
infrastructure-provisioning/src/general/lib/os/fab.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index c14a867..fd17252 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -277,7 +277,7 @@ def configure_http_proxy_server(config):
replace_string += 'acl AllowedCIDRS src {}\\n'.format(cidr)
conn.sudo('sed -i "s|ALLOWED_CIDRS|{}|g" /etc/squid/squid.conf'.format(replace_string))
conn.sudo('systemctl restart squid')
- fab.conn.sudo('touch /tmp/http_proxy_ensured')
+ conn.sudo('touch /tmp/http_proxy_ensured')
except Exception as err:
logging.error('Fai to install and configure squid:', str(err))
traceback.print_exc()
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org
[incubator-datalab] 05/06: [DATALAB-2409]: fixed ipykernel version variable
Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit 3033891502666f1d4baeb6b991a1ff6ab36d80bd
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Thu Oct 7 10:06:58 2021 +0300
[DATALAB-2409]: fixed ipykernel version variable
---
.../src/general/lib/os/fab.py | 33 ++++++----------------
1 file changed, 9 insertions(+), 24 deletions(-)
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index fd17252..088d81d 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -318,11 +318,10 @@ def ensure_python_venv(python_venv_version):
conn.sudo('tar zxvf /tmp/Python-{}.tgz -C /tmp/'.format(python_venv_version))
if os.environ['application'] in ('rstudio', 'tensor-rstudio'):
conn.sudo('''bash -l -c 'cd /tmp/Python-{0} && ./configure --prefix=/opt/python/python{0} '''
- '''--with-zlib-dir=/usr/local/lib/ --with-ensurepip=install --enable-shared' '''.format(
- python_venv_version))
- conn.sudo(
- '''bash -l -c 'echo "export LD_LIBRARY_PATH=/opt/python/python{}/lib" >> /etc/profile' '''.format(
- python_venv_version))
+ '''--with-zlib-dir=/usr/local/lib/ --with-ensurepip=install --enable-shared' '''
+ .format(python_venv_version))
+ conn.sudo('''bash -l -c 'echo "export LD_LIBRARY_PATH=/opt/python/python{}/lib" >> /etc/profile' '''
+ .format(python_venv_version))
else:
conn.sudo(
'''bash -l -c 'cd /tmp/Python-{0} && ./configure --prefix=/opt/python/python{0} '''
@@ -336,25 +335,11 @@ def ensure_python_venv(python_venv_version):
conn.sudo('''bash -l -c '{0} && {1} install -UI pip=={2}' '''.format(venv_command, pip_command,
os.environ['conf_pip_version']))
conn.sudo('''bash -l -c '{} && {} install -UI ipython=={} ipykernel=={} NumPy=={} SciPy=={} Matplotlib=={}
- pandas=={} Sympy=={} Pillow=={} scikit-learn=={} --no-cache-dir' '''.format(venv_command, pip_command,
- os.environ[
- 'pip_packages_ipython'],
- os.environ[
- 'pip_packagesipykernel'],
- os.environ[
- 'pip_packages_numpy'],
- os.environ[
- 'pip_packages_scipy'],
- os.environ[
- 'pip_packages_matplotlib'],
- os.environ[
- 'pip_packages_pandas'],
- os.environ[
- 'pip_packages_sympy'],
- os.environ[
- 'pip_packages_pillow'],
- os.environ[
- 'pip_packages_scikit-learn']))
+ pandas=={} Sympy=={} Pillow=={} scikit-learn=={} --no-cache-dir' '''
+ .format(venv_command, pip_command, os.environ['pip_packages_ipython'], os.environ['pip_packages_ipykernel'],
+ os.environ['pip_packages_numpy'], os.environ['pip_packages_scipy'], os.environ['pip_packages_matplotlib'],
+ os.environ['pip_packages_pandas'], os.environ['pip_packages_sympy'], os.environ['pip_packages_pillow'],
+ os.environ['pip_packages_scikit-learn']))
except Exception as err:
logging.error('Function ensure_python_venv error:', str(err))
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org
[incubator-datalab] 04/06: [DATALAB-2409]: removed r_mirror usage
Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit 4461c32dd2c9a5388cc32825b47cf1d75e648527
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Wed Oct 6 17:29:21 2021 +0300
[DATALAB-2409]: removed r_mirror usage
---
.../src/dataengine/scripts/configure_dataengine.py | 3 +--
.../src/deeplearning/scripts/configure_deep_learning_node.py | 1 -
.../src/general/lib/os/debian/notebook_lib.py | 7 ++-----
.../src/general/lib/os/redhat/notebook_lib.py | 7 ++-----
.../src/general/scripts/aws/dataengine_configure.py | 8 ++++----
.../src/general/scripts/aws/deeplearning_configure.py | 5 ++---
.../src/general/scripts/aws/jupyter_configure.py | 8 +++-----
.../src/general/scripts/aws/jupyterlab_configure.py | 2 --
.../src/general/scripts/aws/rstudio_configure.py | 9 +++------
.../src/general/scripts/aws/tensor-rstudio_configure.py | 4 ++--
.../src/general/scripts/aws/zeppelin_configure.py | 6 +++---
.../src/general/scripts/azure/dataengine_configure.py | 8 ++++----
.../src/general/scripts/azure/deeplearning_configure.py | 6 +++---
.../src/general/scripts/azure/jupyter_configure.py | 8 ++++----
.../src/general/scripts/azure/jupyterlab_configure.py | 6 +++---
.../src/general/scripts/azure/rstudio_configure.py | 4 ++--
.../src/general/scripts/azure/zeppelin_configure.py | 6 +++---
.../src/general/scripts/gcp/dataengine_configure.py | 9 ++++-----
.../src/general/scripts/gcp/deeplearning_configure.py | 4 ++--
.../src/general/scripts/gcp/jupyter_configure.py | 4 ++--
.../src/general/scripts/gcp/jupyterlab_configure.py | 5 ++---
.../src/general/scripts/gcp/rstudio_configure.py | 4 ++--
.../src/general/scripts/gcp/tensor-rstudio_configure.py | 4 ++--
.../src/general/scripts/gcp/zeppelin_configure.py | 5 ++---
.../src/jupyter/scripts/configure_jupyter_node.py | 3 +--
.../src/jupyterlab/scripts/configure_jupyterlab_node.py | 1 -
.../src/rstudio/scripts/configure_rstudio_node.py | 3 +--
.../src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py | 3 +--
.../src/zeppelin/scripts/configure_zeppelin_node.py | 3 +--
29 files changed, 61 insertions(+), 85 deletions(-)
diff --git a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
index 08af772..4de0ca3 100644
--- a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
+++ b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
@@ -36,7 +36,6 @@ parser.add_argument('--spark_version', type=str, default='')
parser.add_argument('--hadoop_version', type=str, default='')
parser.add_argument('--os_user', type=str, default='')
parser.add_argument('--scala_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
parser.add_argument('--master_ip', type=str, default='')
parser.add_argument('--node_type', type=str, default='')
args = parser.parse_args()
@@ -139,7 +138,7 @@ if __name__ == "__main__":
and os.environ['notebook_r_enabled'] == 'true') \
or os.environ['application'] in ('rstudio', 'tensor-rstudio'):
print("Installing R")
- ensure_r(args.os_user, r_libs, args.region, args.r_mirror)
+ ensure_r(args.os_user, r_libs)
print("Install Python 3 modules")
ensure_python3_libraries(args.os_user)
if os.environ['application'] == 'zeppelin':
diff --git a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
index 54f8601..2c7a88d 100644
--- a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
+++ b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
@@ -40,7 +40,6 @@ parser.add_argument('--jupyter_version', type=str, default='')
parser.add_argument('--scala_version', type=str, default='')
parser.add_argument('--spark_version', type=str, default='')
parser.add_argument('--hadoop_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
parser.add_argument('--ip_address', type=str, default='')
parser.add_argument('--exploratory_name', type=str, default='')
parser.add_argument('--edge_ip', type=str, default='')
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
index 95b8135..2d7a82a 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
@@ -79,13 +79,10 @@ def add_marruter_key():
except:
sys.exit(1)
-def ensure_r(os_user, r_libs, region, r_mirror):
+def ensure_r(os_user, r_libs):
if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/r_ensured'):
try:
- if region == 'cn-north-1':
- r_repository = r_mirror
- else:
- r_repository = 'https://cloud.r-project.org'
+ r_repository = 'https://cloud.r-project.org'
#add_marruter_key()
datalab.fab.conn.sudo('apt update')
manage_pkg('-yV install', 'remote', 'libssl-dev libcurl4-gnutls-dev libgit2-dev libxml2-dev libreadline-dev')
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
index f90df73..755e87d 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
@@ -82,13 +82,10 @@ def ensure_r_local_kernel(spark_version, os_user, templates_dir, kernels_dir):
sys.exit(1)
-def ensure_r(os_user, r_libs, region, r_mirror):
+def ensure_r(os_user, r_libs):
if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/r_ensured'.format(os_user)):
try:
- if region == 'cn-north-1':
- r_repository = r_mirror
- else:
- r_repository = 'https://cloud.r-project.org'
+ r_repository = 'https://cloud.r-project.org'
manage_pkg('-y install', 'remote', 'cmake')
manage_pkg('-y install', 'remote', 'libcur*')
datalab.fab.conn.sudo('echo -e "[base]\nname=CentOS-7-Base\nbaseurl=http://buildlogs.centos.org/centos/7/os/x86_64-20140704-1/\ngpgcheck=1\ngpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7\npriority=1\nexclude=php mysql" >> /etc/yum.repos.d/CentOS-base.repo')
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
index 47fd12a..e9bbf11 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
@@ -100,10 +100,10 @@ def configure_slave(slave_number, data_engine):
try:
logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
- "--scala_version {} --r_mirror {} --master_ip {} --node_type {}". \
+ "--scala_version {} --master_ip {} --node_type {}". \
format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
- os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
+ os.environ['notebook_scala_version'], master_node_hostname,
'slave')
try:
subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
@@ -287,10 +287,10 @@ if __name__ == "__main__":
try:
logging.info('[CONFIGURE MASTER NODE]')
params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
- "--scala_version {} --r_mirror {} --master_ip {} --node_type {}".\
+ "--scala_version {} --master_ip {} --node_type {}".\
format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
- os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
+ os.environ['notebook_scala_version'], master_node_hostname,
'master')
try:
subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
index d1d7c4e..44e3621 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
@@ -176,12 +176,11 @@ if __name__ == "__main__":
"--os_user {2} --jupyter_version {3} " \
"--scala_version {4} --spark_version {5} " \
"--hadoop_version {6} --region {7} " \
- "--r_mirror {8} --ip_address {9} --exploratory_name {10} --edge_ip {11}" \
+ "--ip_address {8} --exploratory_name {9} --edge_ip {10}" \
.format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
- os.environ['aws_region'], os.environ['notebook_r_mirror'],
- notebook_config['ip_address'], notebook_config['exploratory_name'], edge_ip)
+ os.environ['aws_region'], notebook_config['ip_address'], notebook_config['exploratory_name'], edge_ip)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True, check=True)
except:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
index 0bc9503..cba9911 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
@@ -164,10 +164,9 @@ if __name__ == "__main__":
"--hadoop_version {4} " \
"--os_user {5} " \
"--scala_version {6} " \
- "--r_mirror {7} " \
- "--ip_address {8} " \
- "--exploratory_name {9} " \
- "--edge_ip {10}".\
+ "--ip_address {7} " \
+ "--exploratory_name {8} " \
+ "--edge_ip {9}".\
format(instance_hostname,
keyfile_name,
os.environ['aws_region'],
@@ -175,7 +174,6 @@ if __name__ == "__main__":
os.environ['notebook_hadoop_version'],
notebook_config['datalab_ssh_user'],
os.environ['notebook_scala_version'],
- os.environ['notebook_r_mirror'],
notebook_config['ip_address'],
notebook_config['exploratory_name'],
edge_ip)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
index 6fe68e3..aa33a49 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
@@ -164,7 +164,6 @@ if __name__ == "__main__":
"--hadoop_version {} " \
"--os_user {} " \
"--scala_version {} " \
- "--r_mirror {} " \
"--ip_address {} " \
"--exploratory_name {}".\
format(instance_hostname,
@@ -175,7 +174,6 @@ if __name__ == "__main__":
os.environ['notebook_hadoop_version'],
notebook_config['datalab_ssh_user'],
os.environ['notebook_scala_version'],
- os.environ['notebook_r_mirror'],
notebook_config['ip_address'],
notebook_config['exploratory_name'])
try:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
index 1f77291..231a13d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
@@ -165,15 +165,12 @@ if __name__ == "__main__":
# installing and configuring R_STUDIO and all dependencies
try:
logging.info('[CONFIGURE R_STUDIO NOTEBOOK INSTANCE]')
- params = "--hostname {0} --keyfile {1} " \
- "--region {2} --rstudio_pass {3} " \
- "--rstudio_version {4} --os_user {5} " \
- "--r_mirror {6} --ip_address {7} --exploratory_name {8} --edge_ip {9}" \
+ params = "--hostname {0} --keyfile {1} --region {2} --rstudio_pass {3} --os_user {5} " \
+ "--ip_address {6} --exploratory_name {7} --edge_ip {8}" \
.format(instance_hostname, keyfile_name,
os.environ['aws_region'], notebook_config['rstudio_pass'],
os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
- os.environ['notebook_r_mirror'], notebook_config['ip_address'],
- notebook_config['exploratory_name'], edge_ip)
+ notebook_config['ip_address'], notebook_config['exploratory_name'], edge_ip)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True, check=True)
except:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
index 83f942d..c51b09f 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
@@ -161,11 +161,11 @@ if __name__ == "__main__":
params = "--hostname {0} --keyfile {1} " \
"--region {2} --rstudio_pass {3} " \
"--rstudio_version {4} --os_user {5} " \
- "--r_mirror {6} --ip_address {7} --exploratory_name {8} --edge_ip {9}" \
+ "--ip_address {6} --exploratory_name {7} --edge_ip {8}" \
.format(instance_hostname, keyfile_name,
os.environ['aws_region'], notebook_config['rstudio_pass'],
os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
- os.environ['notebook_r_mirror'], notebook_config['ip_address'],
+ notebook_config['ip_address'],
notebook_config['exploratory_name'], edge_ip)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_tensor-rstudio_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
index 96105fe..3a12ee6 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
@@ -177,15 +177,15 @@ if __name__ == "__main__":
"--edge_hostname {8} --proxy_port {9} " \
"--zeppelin_version {10} --scala_version {11} " \
"--livy_version {12} --multiple_clusters {13} " \
- "--r_mirror {14} --endpoint_url {15} " \
- "--ip_address {16} --exploratory_name {17} --edge_ip {18}" \
+ "--endpoint_url {14} " \
+ "--ip_address {15} --exploratory_name {16} --edge_ip {17}" \
.format(instance_hostname, notebook_config['instance_name'], keyfile_name, os.environ['aws_region'],
json.dumps(additional_config), notebook_config['datalab_ssh_user'],
os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], edge_instance_hostname, '3128',
os.environ['notebook_zeppelin_version'], os.environ['notebook_scala_version'],
os.environ['notebook_livy_version'], os.environ['notebook_multiple_clusters'],
- os.environ['notebook_r_mirror'], notebook_config['endpoint_url'], notebook_config['ip_address'],
+ notebook_config['endpoint_url'], notebook_config['ip_address'],
notebook_config['exploratory_name'], edge_ip)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
index 088fbec..27bb216 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
@@ -125,10 +125,10 @@ def configure_slave(slave_number, data_engine):
logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
print('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
- "--scala_version {} --r_mirror {} --master_ip {} --node_type {}". \
+ "--scala_version {} --master_ip {} --node_type {}". \
format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
- os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
+ os.environ['notebook_scala_version'], master_node_hostname,
'slave')
try:
subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
@@ -317,10 +317,10 @@ if __name__ == "__main__":
logging.info('[CONFIGURE MASTER NODE]')
print('[CONFIGURE MASTER NODE]')
params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
- "--scala_version {} --r_mirror {} --master_ip {} --node_type {}".\
+ "--scala_version {} --master_ip {} --node_type {}".\
format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
- os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
+ os.environ['notebook_scala_version'], master_node_hostname,
'master')
try:
subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
index be91e28..c228c97 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
@@ -209,12 +209,12 @@ if __name__ == "__main__":
"--os_user {2} --jupyter_version {3} " \
"--scala_version {4} --spark_version {5} " \
"--hadoop_version {6} --region {7} " \
- "--r_mirror {8} --ip_address {9} --exploratory_name {10} --edge_ip {11}" \
+ "--ip_address {8} --exploratory_name {9} --edge_ip {10}" \
.format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
- os.environ['azure_region'], os.environ['notebook_r_mirror'],
- notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
+ os.environ['azure_region'], notebook_config['ip_address'],
+ notebook_config['exploratory_name'], edge_hostname)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True, check=True)
datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
index 2e939b9..0f30b71 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
@@ -191,13 +191,13 @@ if __name__ == "__main__":
params = "--hostname {0} --keyfile {1} " \
"--region {2} --spark_version {3} " \
"--hadoop_version {4} --os_user {5} " \
- "--scala_version {6} --r_mirror {7} " \
- "--ip_address {8} --exploratory_name {9} --edge_ip {10}".\
+ "--scala_version {6} " \
+ "--ip_address {7} --exploratory_name {8} --edge_ip {9}".\
format(instance_hostname, keyfile_name,
os.environ['azure_region'], os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], notebook_config['datalab_ssh_user'],
- os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
- notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
+ os.environ['notebook_scala_version'], notebook_config['ip_address'],
+ notebook_config['exploratory_name'], edge_hostname)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True, check=True)
datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
index 8c694e1..4d8fe90 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
@@ -191,12 +191,12 @@ if __name__ == "__main__":
params = "--hostname {0} --keyfile {1} " \
"--region {2} --spark_version {3} " \
"--hadoop_version {4} --os_user {5} " \
- "--scala_version {6} --r_mirror {7} " \
- "--ip_address {8} --exploratory_name {9} --edge_ip {10}".\
+ "--scala_version {6} " \
+ "--ip_address {7} --exploratory_name {8} --edge_ip {9}".\
format(instance_hostname, keyfile_name,
os.environ['azure_region'], os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], notebook_config['datalab_ssh_user'],
- os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
+ os.environ['notebook_scala_version'],
notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_jupyterlab_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
index 4ed3ad7..bbf85e5 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
@@ -192,11 +192,11 @@ if __name__ == "__main__":
params = "--hostname {0} --keyfile {1} " \
"--region {2} --rstudio_pass {3} " \
"--rstudio_version {4} --os_user {5} " \
- "--r_mirror {6} --ip_address {7} --exploratory_name {8} --edge_ip {9} " \
+ "--ip_address {6} --exploratory_name {7} --edge_ip {8} " \
.format(instance_hostname, keyfile_name,
os.environ['azure_region'], notebook_config['rstudio_pass'],
os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
- os.environ['notebook_r_mirror'], notebook_config['ip_address'],
+ notebook_config['ip_address'],
notebook_config['exploratory_name'], edge_hostname)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
index b4a29b4..536955b 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
@@ -200,14 +200,14 @@ if __name__ == "__main__":
"--edge_hostname {8} --proxy_port {9} " \
"--zeppelin_version {10} --scala_version {11} " \
"--livy_version {12} --multiple_clusters {13} " \
- "--r_mirror {14} --endpoint_url {15} " \
- "--ip_address {16} --exploratory_name {17} --edge_ip {18} " \
+ "--endpoint_url {14} " \
+ "--ip_address {15} --exploratory_name {16} --edge_ip {17} " \
.format(instance_hostname, notebook_config['instance_name'], keyfile_name, os.environ['azure_region'],
json.dumps(additional_config), notebook_config['datalab_ssh_user'],
os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
edge_instance_private_hostname, '3128', os.environ['notebook_zeppelin_version'],
os.environ['notebook_scala_version'], os.environ['notebook_livy_version'],
- os.environ['notebook_multiple_clusters'], os.environ['notebook_r_mirror'], 'null',
+ os.environ['notebook_multiple_clusters'], 'null',
notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
index 87e6bb2..5a33caa 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
@@ -109,11 +109,10 @@ def configure_slave(slave_number, data_engine):
logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
print('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
- "--scala_version {} --r_mirror {} --master_ip {} --node_type {}". \
+ "--scala_version {} --master_ip {} --node_type {}". \
format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
- os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
- 'slave')
+ os.environ['notebook_scala_version'], master_node_hostname, 'slave')
try:
subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
except:
@@ -299,10 +298,10 @@ if __name__ == "__main__":
logging.info('[CONFIGURE MASTER NODE]')
print('[CONFIGURE MASTER NODE]')
params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
- "--scala_version {} --r_mirror {} --master_ip {} --node_type {}".\
+ "--scala_version {} --master_ip {} --node_type {}".\
format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
- os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
+ os.environ['notebook_scala_version'], master_node_hostname,
'master')
try:
subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
index 3be0192..5f8fea4 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
@@ -162,11 +162,11 @@ if __name__ == "__main__":
"--os_user {} --jupyter_version {} " \
"--scala_version {} --spark_version {} " \
"--hadoop_version {} --region {} " \
- "--r_mirror {} --exploratory_name {} --edge_ip {}" \
+ "--exploratory_name {} --edge_ip {}" \
.format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
- os.environ['gcp_region'], os.environ['notebook_r_mirror'],
+ os.environ['gcp_region'],
notebook_config['exploratory_name'], edge_instance_private_ip)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
index 0ede3eb..e27f16b 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
@@ -162,13 +162,13 @@ if __name__ == "__main__":
params = "--hostname {} --keyfile {} " \
"--region {} --spark_version {} " \
"--hadoop_version {} --os_user {} " \
- "--scala_version {} --r_mirror {} " \
+ "--scala_version {} " \
"--exploratory_name {} "\
"--edge_ip {}".\
format(instance_hostname, notebook_config['ssh_key_path'],
os.environ['gcp_region'], os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], notebook_config['datalab_ssh_user'],
- os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
+ os.environ['notebook_scala_version'],
notebook_config['exploratory_name'], edge_instance_private_ip)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
index 4207257..d1f1db1 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
@@ -162,13 +162,12 @@ if __name__ == "__main__":
params = "--hostname {} --keyfile {} --edge_ip {} " \
"--region {} --spark_version {} " \
"--hadoop_version {} --os_user {} " \
- "--scala_version {} --r_mirror {} " \
+ "--scala_version {} " \
"--exploratory_name {}".\
format(instance_hostname, notebook_config['ssh_key_path'], edge_instance_private_ip,
os.environ['gcp_region'], os.environ['notebook_spark_version'],
os.environ['notebook_hadoop_version'], notebook_config['datalab_ssh_user'],
- os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
- notebook_config['exploratory_name'], )
+ os.environ['notebook_scala_version'], notebook_config['exploratory_name'], )
try:
subprocess.run("~/scripts/{}.py {}".format('configure_jupyterlab_node', params), shell=True, check=True)
except:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
index 8b51ca0..3991d50 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
@@ -166,11 +166,11 @@ if __name__ == "__main__":
params = "--hostname {0} --keyfile {1} " \
"--region {2} --rstudio_pass {3} " \
"--rstudio_version {4} --os_user {5} " \
- "--r_mirror {6} --ip_address {7} --exploratory_name {8} --edge_ip {9}" \
+ "--ip_address {6} --exploratory_name {7} --edge_ip {8}" \
.format(instance_hostname, notebook_config['ssh_key_path'],
os.environ['gcp_region'], notebook_config['rstudio_pass'],
os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
- os.environ['notebook_r_mirror'], notebook_config['ip_address'],
+ notebook_config['ip_address'],
notebook_config['exploratory_name'], edge_instance_private_ip)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
index 56ddd56..f201944 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
@@ -169,11 +169,11 @@ if __name__ == "__main__":
params = "--hostname {} --keyfile {} " \
"--region {} --rstudio_pass {} " \
"--rstudio_version {} --os_user {} " \
- "--r_mirror {} --exploratory_name {} --edge_ip {}" \
+ "--exploratory_name {} --edge_ip {}" \
.format(instance_hostname, notebook_config['ssh_key_path'],
os.environ['gcp_region'], notebook_config['rstudio_pass'],
os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
- os.environ['notebook_r_mirror'], notebook_config['exploratory_name'], edge_instance_private_ip)
+ notebook_config['exploratory_name'], edge_instance_private_ip)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_tensor-rstudio_node', params), shell=True, check=True)
except:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
index c55f2e4..a9fe2b4 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
@@ -170,7 +170,7 @@ if __name__ == "__main__":
"--edge_hostname {} --proxy_port {} " \
"--zeppelin_version {} --scala_version {} " \
"--livy_version {} --multiple_clusters {} " \
- "--r_mirror {} --endpoint_url {} " \
+ "--endpoint_url {} " \
"--exploratory_name {} " \
"--edge_ip {}" \
.format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -178,8 +178,7 @@ if __name__ == "__main__":
os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'], edge_instance_name,
'3128', os.environ['notebook_zeppelin_version'], os.environ['notebook_scala_version'],
os.environ['notebook_livy_version'], os.environ['notebook_multiple_clusters'],
- os.environ['notebook_r_mirror'], 'null',
- notebook_config['exploratory_name'], edge_instance_private_ip)
+ 'null', notebook_config['exploratory_name'], edge_instance_private_ip)
try:
subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True, check=True)
except:
diff --git a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
index 76c5fbf..3369ab2 100644
--- a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
+++ b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
@@ -36,7 +36,6 @@ parser.add_argument('--spark_version', type=str, default='')
parser.add_argument('--hadoop_version', type=str, default='')
parser.add_argument('--os_user', type=str, default='')
parser.add_argument('--scala_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
parser.add_argument('--ip_address', type=str, default='')
parser.add_argument('--exploratory_name', type=str, default='')
parser.add_argument('--edge_ip', type=str, default='')
@@ -93,7 +92,7 @@ if __name__ == "__main__":
ensure_scala(scala_link, args.scala_version, args.os_user)
if os.environ['notebook_r_enabled'] == 'true':
print("Installing R")
- ensure_r(args.os_user, r_libs, args.region, args.r_mirror)
+ ensure_r(args.os_user, r_libs)
print("Install Python 3 modules")
ensure_python3_libraries(args.os_user)
diff --git a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
index 928f868..e8b5862 100644
--- a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
+++ b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
@@ -37,7 +37,6 @@ parser.add_argument('--spark_version', type=str, default='')
parser.add_argument('--hadoop_version', type=str, default='')
parser.add_argument('--os_user', type=str, default='')
parser.add_argument('--scala_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
parser.add_argument('--ip_address', type=str, default='')
parser.add_argument('--exploratory_name', type=str, default='')
args = parser.parse_args()
diff --git a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
index df84c99..397a22b 100644
--- a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
+++ b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
@@ -39,7 +39,6 @@ parser.add_argument('--region', type=str, default='')
parser.add_argument('--os_user', type=str, default='')
parser.add_argument('--rstudio_pass', type=str, default='')
parser.add_argument('--rstudio_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
parser.add_argument('--ip_address', type=str, default='')
parser.add_argument('--exploratory_name', type=str, default='')
parser.add_argument('--edge_ip', type=str, default='')
@@ -86,7 +85,7 @@ if __name__ == "__main__":
print("Install Java")
ensure_jre_jdk(args.os_user)
print("Install R")
- ensure_r(args.os_user, r_libs, args.region, args.r_mirror)
+ ensure_r(args.os_user, r_libs)
print("Install Python 3 modules")
ensure_python3_libraries(args.os_user)
diff --git a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
index e3398a4..e1b7cf4 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
@@ -39,7 +39,6 @@ parser.add_argument('--region', type=str, default='')
parser.add_argument('--os_user', type=str, default='')
parser.add_argument('--rstudio_pass', type=str, default='')
parser.add_argument('--rstudio_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
parser.add_argument('--ip_address', type=str, default='')
parser.add_argument('--exploratory_name', type=str, default='')
parser.add_argument('--edge_ip', type=str, default='')
@@ -94,7 +93,7 @@ if __name__ == "__main__":
print("Install Java")
ensure_jre_jdk(args.os_user)
print("Install R")
- ensure_r(args.os_user, r_libs, args.region, args.r_mirror)
+ ensure_r(args.os_user, r_libs)
print("Install Python 3 modules")
ensure_python3_libraries(args.os_user)
diff --git a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
index edbe969..896bc08 100644
--- a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
+++ b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
@@ -47,7 +47,6 @@ parser.add_argument('--proxy_port', type=str, default='')
parser.add_argument('--scala_version', type=str, default='')
parser.add_argument('--livy_version', type=str, default='')
parser.add_argument('--multiple_clusters', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
parser.add_argument('--endpoint_url', type=str, default='')
parser.add_argument('--ip_address', type=str, default='')
parser.add_argument('--exploratory_name', type=str, default='')
@@ -226,7 +225,7 @@ if __name__ == "__main__":
ensure_scala(scala_link, args.scala_version, args.os_user)
if os.environ['notebook_r_enabled'] == 'true':
print("Installing R")
- ensure_r(args.os_user, r_libs, args.region, args.r_mirror)
+ ensure_r(args.os_user, r_libs)
print("Install Python 3 modules")
ensure_python3_libraries(args.os_user)
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org
[incubator-datalab] 03/06: [DATALAB-2409]: fixed sed for managing
/opt mount on azure
Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit 2f278fb9803a35b2a1d5d4be5b562e35196d9da9
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Wed Oct 6 14:36:24 2021 +0300
[DATALAB-2409]: fixed sed for managing /opt mount on azure
---
infrastructure-provisioning/src/general/lib/azure/actions_lib.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
index b83aba7..756dd27 100644
--- a/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/azure/actions_lib.py
@@ -1240,7 +1240,7 @@ def ensure_right_mount_paths():
if disk != '' and disk not in datalab.fab.conn.sudo('lsblk | grep -E "(mnt|media)"').stdout and disk not in datalab.fab.conn.sudo("fdisk -l | grep 'BIOS boot'").stdout:
datalab.fab.conn.sudo("umount -l /opt")
datalab.fab.conn.sudo("mount /dev/{}1 /opt".format(disk))
- datalab.fab.conn.sudo('sed -i "/opt/ s|/dev/{}|/dev/{}1|g" /etc/fstab'.format(opt_disk, disk))
+ datalab.fab.conn.sudo('sed -i "/opt/ s|/dev/{}1|/dev/{}1|g" /etc/fstab'.format(opt_disk, disk))
def prepare_vm_for_image(creds=False, os_user='', hostname='', keyfile=''):
if creds:
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org
[incubator-datalab] 06/06: [DATALAB-2409]: fixed scikit_learn
version variable
Posted by lf...@apache.org.
This is an automated email from the ASF dual-hosted git repository.
lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git
commit e885bad8584f96cdde5bedec83c9ef207c24ebeb
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Thu Oct 7 13:29:41 2021 +0300
[DATALAB-2409]: fixed scikit_learn version variable
---
infrastructure-provisioning/src/general/conf/datalab.ini | 2 +-
infrastructure-provisioning/src/general/lib/os/fab.py | 6 +++---
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/infrastructure-provisioning/src/general/conf/datalab.ini b/infrastructure-provisioning/src/general/conf/datalab.ini
index 7d399e3..1884a1c 100644
--- a/infrastructure-provisioning/src/general/conf/datalab.ini
+++ b/infrastructure-provisioning/src/general/conf/datalab.ini
@@ -120,7 +120,7 @@ matplotlib = 3.4.2
pandas = 1.3.1
sympy = 1.8
pillow = 8.3.1
-scikit-learn = 0.24.2
+scikit_learn = 0.24.2
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index 088d81d..e2f3439 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -334,12 +334,12 @@ def ensure_python_venv(python_venv_version):
pip_command = '/opt/python/python{0}/bin/pip{1}'.format(python_venv_version, python_venv_version[:3])
conn.sudo('''bash -l -c '{0} && {1} install -UI pip=={2}' '''.format(venv_command, pip_command,
os.environ['conf_pip_version']))
- conn.sudo('''bash -l -c '{} && {} install -UI ipython=={} ipykernel=={} NumPy=={} SciPy=={} Matplotlib=={}
- pandas=={} Sympy=={} Pillow=={} scikit-learn=={} --no-cache-dir' '''
+ conn.sudo('''bash -l -c '{} && {} install -UI ipython=={} ipykernel=={} NumPy=={} SciPy=={} Matplotlib=={} '''
+ '''pandas=={} Sympy=={} Pillow=={} scikit-learn=={} --no-cache-dir' '''
.format(venv_command, pip_command, os.environ['pip_packages_ipython'], os.environ['pip_packages_ipykernel'],
os.environ['pip_packages_numpy'], os.environ['pip_packages_scipy'], os.environ['pip_packages_matplotlib'],
os.environ['pip_packages_pandas'], os.environ['pip_packages_sympy'], os.environ['pip_packages_pillow'],
- os.environ['pip_packages_scikit-learn']))
+ os.environ['pip_packages_scikit_learn']))
except Exception as err:
logging.error('Function ensure_python_venv error:', str(err))
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org