Posted to commits@datalab.apache.org by lf...@apache.org on 2021/10/07 10:29:53 UTC

[incubator-datalab] 04/06: [DATALAB-2409]: removed r_mirror usage

This is an automated email from the ASF dual-hosted git repository.

lfrolov pushed a commit to branch DATALAB-2409
in repository https://gitbox.apache.org/repos/asf/incubator-datalab.git

commit 4461c32dd2c9a5388cc32825b47cf1d75e648527
Author: leonidfrolov <fr...@gmail.com>
AuthorDate: Wed Oct 6 17:29:21 2021 +0300

    [DATALAB-2409]: removed r_mirror usage
---
 .../src/dataengine/scripts/configure_dataengine.py               | 3 +--
 .../src/deeplearning/scripts/configure_deep_learning_node.py     | 1 -
 .../src/general/lib/os/debian/notebook_lib.py                    | 7 ++-----
 .../src/general/lib/os/redhat/notebook_lib.py                    | 7 ++-----
 .../src/general/scripts/aws/dataengine_configure.py              | 8 ++++----
 .../src/general/scripts/aws/deeplearning_configure.py            | 5 ++---
 .../src/general/scripts/aws/jupyter_configure.py                 | 8 +++-----
 .../src/general/scripts/aws/jupyterlab_configure.py              | 2 --
 .../src/general/scripts/aws/rstudio_configure.py                 | 9 +++------
 .../src/general/scripts/aws/tensor-rstudio_configure.py          | 4 ++--
 .../src/general/scripts/aws/zeppelin_configure.py                | 6 +++---
 .../src/general/scripts/azure/dataengine_configure.py            | 8 ++++----
 .../src/general/scripts/azure/deeplearning_configure.py          | 6 +++---
 .../src/general/scripts/azure/jupyter_configure.py               | 8 ++++----
 .../src/general/scripts/azure/jupyterlab_configure.py            | 6 +++---
 .../src/general/scripts/azure/rstudio_configure.py               | 4 ++--
 .../src/general/scripts/azure/zeppelin_configure.py              | 6 +++---
 .../src/general/scripts/gcp/dataengine_configure.py              | 9 ++++-----
 .../src/general/scripts/gcp/deeplearning_configure.py            | 4 ++--
 .../src/general/scripts/gcp/jupyter_configure.py                 | 4 ++--
 .../src/general/scripts/gcp/jupyterlab_configure.py              | 5 ++---
 .../src/general/scripts/gcp/rstudio_configure.py                 | 4 ++--
 .../src/general/scripts/gcp/tensor-rstudio_configure.py          | 4 ++--
 .../src/general/scripts/gcp/zeppelin_configure.py                | 5 ++---
 .../src/jupyter/scripts/configure_jupyter_node.py                | 3 +--
 .../src/jupyterlab/scripts/configure_jupyterlab_node.py          | 1 -
 .../src/rstudio/scripts/configure_rstudio_node.py                | 3 +--
 .../src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py  | 3 +--
 .../src/zeppelin/scripts/configure_zeppelin_node.py              | 3 +--
 29 files changed, 61 insertions(+), 85 deletions(-)
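
Note on the calling convention: because --r_mirror is dropped from the argparse definitions in the node-side scripts, any *_configure.py script that still passed the flag would make the node script abort with an "unrecognized arguments" error, which is presumably why every caller and callee changes in the same commit. A small standalone sketch of that standard-library argparse behaviour (illustrative only, not DataLab code; the values are made up):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--os_user', type=str, default='')
    parser.add_argument('--scala_version', type=str, default='')
    # '--r_mirror' is intentionally no longer declared, mirroring this patch.

    # The new-style argument list parses fine:
    args = parser.parse_args(['--os_user', 'datalab-user'])
    print(args.os_user)  # -> datalab-user

    # An old-style caller that still passes --r_mirror now fails:
    try:
        parser.parse_args(['--os_user', 'datalab-user',
                           '--r_mirror', 'https://cran.example'])
    except SystemExit:
        print('unrecognized --r_mirror: callers must be updated in lockstep')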

diff --git a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
index 08af772..4de0ca3 100644
--- a/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
+++ b/infrastructure-provisioning/src/dataengine/scripts/configure_dataengine.py
@@ -36,7 +36,6 @@ parser.add_argument('--spark_version', type=str, default='')
 parser.add_argument('--hadoop_version', type=str, default='')
 parser.add_argument('--os_user', type=str, default='')
 parser.add_argument('--scala_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
 parser.add_argument('--master_ip', type=str, default='')
 parser.add_argument('--node_type', type=str, default='')
 args = parser.parse_args()
@@ -139,7 +138,7 @@ if __name__ == "__main__":
         and os.environ['notebook_r_enabled'] == 'true') \
             or os.environ['application'] in ('rstudio', 'tensor-rstudio'):
         print("Installing R")
-        ensure_r(args.os_user, r_libs, args.region, args.r_mirror)
+        ensure_r(args.os_user, r_libs)
     print("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
     if os.environ['application'] == 'zeppelin':
diff --git a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
index 54f8601..2c7a88d 100644
--- a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
+++ b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
@@ -40,7 +40,6 @@ parser.add_argument('--jupyter_version', type=str, default='')
 parser.add_argument('--scala_version', type=str, default='')
 parser.add_argument('--spark_version', type=str, default='')
 parser.add_argument('--hadoop_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
 parser.add_argument('--ip_address', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 parser.add_argument('--edge_ip', type=str, default='')
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
index 95b8135..2d7a82a 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
@@ -79,13 +79,10 @@ def add_marruter_key():
     except:
         sys.exit(1)
 
-def ensure_r(os_user, r_libs, region, r_mirror):
+def ensure_r(os_user, r_libs):
     if not exists(datalab.fab.conn,'/home/' + os_user + '/.ensure_dir/r_ensured'):
         try:
-            if region == 'cn-north-1':
-                r_repository = r_mirror
-            else:
-                r_repository = 'https://cloud.r-project.org'
+            r_repository = 'https://cloud.r-project.org'
             #add_marruter_key()
             datalab.fab.conn.sudo('apt update')
             manage_pkg('-yV install', 'remote', 'libssl-dev libcurl4-gnutls-dev libgit2-dev libxml2-dev libreadline-dev')
diff --git a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
index f90df73..755e87d 100644
--- a/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/redhat/notebook_lib.py
@@ -82,13 +82,10 @@ def ensure_r_local_kernel(spark_version, os_user, templates_dir, kernels_dir):
             sys.exit(1)
 
 
-def ensure_r(os_user, r_libs, region, r_mirror):
+def ensure_r(os_user, r_libs):
     if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/r_ensured'.format(os_user)):
         try:
-            if region == 'cn-north-1':
-                r_repository = r_mirror
-            else:
-                r_repository = 'https://cloud.r-project.org'
+            r_repository = 'https://cloud.r-project.org'
             manage_pkg('-y install', 'remote', 'cmake')
             manage_pkg('-y install', 'remote', 'libcur*')
             datalab.fab.conn.sudo('echo -e "[base]\nname=CentOS-7-Base\nbaseurl=http://buildlogs.centos.org/centos/7/os/x86_64-20140704-1/\ngpgcheck=1\ngpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7\npriority=1\nexclude=php mysql" >> /etc/yum.repos.d/CentOS-base.repo')
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
index 47fd12a..e9bbf11 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_configure.py
@@ -100,10 +100,10 @@ def configure_slave(slave_number, data_engine):
     try:
         logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
-                 "--scala_version {} --r_mirror {} --master_ip {} --node_type {}". \
+                 "--scala_version {} --master_ip {} --node_type {}". \
             format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
+                   os.environ['notebook_scala_version'], master_node_hostname,
                    'slave')
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
@@ -287,10 +287,10 @@ if __name__ == "__main__":
     try:
         logging.info('[CONFIGURE MASTER NODE]')
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
-                 "--scala_version {} --r_mirror {} --master_ip {} --node_type {}".\
+                 "--scala_version {} --master_ip {} --node_type {}".\
             format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
+                   os.environ['notebook_scala_version'], master_node_hostname,
                    'master')
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
index d1d7c4e..44e3621 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
@@ -176,12 +176,11 @@ if __name__ == "__main__":
                  "--os_user {2} --jupyter_version {3} " \
                  "--scala_version {4} --spark_version {5} " \
                  "--hadoop_version {6} --region {7} " \
-                 "--r_mirror {8} --ip_address {9} --exploratory_name {10} --edge_ip {11}" \
+                 "--ip_address {8} --exploratory_name {9} --edge_ip {10}" \
             .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                     os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
                     os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
-                    os.environ['aws_region'], os.environ['notebook_r_mirror'],
-                    notebook_config['ip_address'], notebook_config['exploratory_name'], edge_ip)
+                    os.environ['aws_region'], notebook_config['ip_address'], notebook_config['exploratory_name'], edge_ip)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True, check=True)
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
index 0bc9503..cba9911 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
@@ -164,10 +164,9 @@ if __name__ == "__main__":
                  "--hadoop_version {4} " \
                  "--os_user {5} " \
                  "--scala_version {6} " \
-                 "--r_mirror {7} " \
-                 "--ip_address {8} " \
-                 "--exploratory_name {9} " \
-                 "--edge_ip {10}".\
+                 "--ip_address {7} " \
+                 "--exploratory_name {8} " \
+                 "--edge_ip {9}".\
             format(instance_hostname,
                    keyfile_name,
                    os.environ['aws_region'],
@@ -175,7 +174,6 @@ if __name__ == "__main__":
                    os.environ['notebook_hadoop_version'],
                    notebook_config['datalab_ssh_user'],
                    os.environ['notebook_scala_version'],
-                   os.environ['notebook_r_mirror'],
                    notebook_config['ip_address'],
                    notebook_config['exploratory_name'],
                    edge_ip)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
index 6fe68e3..aa33a49 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyterlab_configure.py
@@ -164,7 +164,6 @@ if __name__ == "__main__":
                  "--hadoop_version {} " \
                  "--os_user {} " \
                  "--scala_version {} " \
-                 "--r_mirror {} " \
                  "--ip_address {} " \
                  "--exploratory_name {}".\
             format(instance_hostname,
@@ -175,7 +174,6 @@ if __name__ == "__main__":
                    os.environ['notebook_hadoop_version'],
                    notebook_config['datalab_ssh_user'],
                    os.environ['notebook_scala_version'],
-                   os.environ['notebook_r_mirror'],
                    notebook_config['ip_address'],
                    notebook_config['exploratory_name'])
         try:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
index 1f77291..231a13d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
@@ -165,15 +165,12 @@ if __name__ == "__main__":
     # installing and configuring R_STUDIO and all dependencies
     try:
         logging.info('[CONFIGURE R_STUDIO NOTEBOOK INSTANCE]')
-        params = "--hostname {0}  --keyfile {1} " \
-                 "--region {2} --rstudio_pass {3} " \
-                 "--rstudio_version {4} --os_user {5} " \
-                 "--r_mirror {6} --ip_address {7} --exploratory_name {8} --edge_ip {9}" \
+        params = "--hostname {0}  --keyfile {1} --region {2} --rstudio_pass {3} --os_user {5} " \
+                 "--ip_address {6} --exploratory_name {7} --edge_ip {8}" \
             .format(instance_hostname, keyfile_name,
                     os.environ['aws_region'], notebook_config['rstudio_pass'],
                     os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
-                    os.environ['notebook_r_mirror'], notebook_config['ip_address'],
-                    notebook_config['exploratory_name'], edge_ip)
+                    notebook_config['ip_address'], notebook_config['exploratory_name'], edge_ip)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True, check=True)
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
index 83f942d..c51b09f 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
@@ -161,11 +161,11 @@ if __name__ == "__main__":
         params = "--hostname {0}  --keyfile {1} " \
                  "--region {2} --rstudio_pass {3} " \
                  "--rstudio_version {4} --os_user {5} " \
-                 "--r_mirror {6} --ip_address {7} --exploratory_name {8} --edge_ip {9}" \
+                 "--ip_address {6} --exploratory_name {7} --edge_ip {8}" \
             .format(instance_hostname, keyfile_name,
                     os.environ['aws_region'], notebook_config['rstudio_pass'],
                     os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
-                    os.environ['notebook_r_mirror'], notebook_config['ip_address'],
+                    notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_ip)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_tensor-rstudio_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
index 96105fe..3a12ee6 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
@@ -177,15 +177,15 @@ if __name__ == "__main__":
                  "--edge_hostname {8} --proxy_port {9} " \
                  "--zeppelin_version {10} --scala_version {11} " \
                  "--livy_version {12} --multiple_clusters {13} " \
-                 "--r_mirror {14} --endpoint_url {15} " \
-                 "--ip_address {16} --exploratory_name {17} --edge_ip {18}" \
+                 "--endpoint_url {14} " \
+                 "--ip_address {15} --exploratory_name {16} --edge_ip {17}" \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, os.environ['aws_region'],
                     json.dumps(additional_config), notebook_config['datalab_ssh_user'],
                     os.environ['notebook_spark_version'],
                     os.environ['notebook_hadoop_version'], edge_instance_hostname, '3128',
                     os.environ['notebook_zeppelin_version'], os.environ['notebook_scala_version'],
                     os.environ['notebook_livy_version'], os.environ['notebook_multiple_clusters'],
-                    os.environ['notebook_r_mirror'], notebook_config['endpoint_url'], notebook_config['ip_address'],
+                    notebook_config['endpoint_url'], notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_ip)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
index 088fbec..27bb216 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_configure.py
@@ -125,10 +125,10 @@ def configure_slave(slave_number, data_engine):
         logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
         print('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
-                 "--scala_version {} --r_mirror {} --master_ip {} --node_type {}". \
+                 "--scala_version {} --master_ip {} --node_type {}". \
             format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
+                   os.environ['notebook_scala_version'], master_node_hostname,
                    'slave')
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
@@ -317,10 +317,10 @@ if __name__ == "__main__":
         logging.info('[CONFIGURE MASTER NODE]')
         print('[CONFIGURE MASTER NODE]')
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
-                 "--scala_version {} --r_mirror {} --master_ip {} --node_type {}".\
+                 "--scala_version {} --master_ip {} --node_type {}".\
             format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
+                   os.environ['notebook_scala_version'], master_node_hostname,
                    'master')
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
index be91e28..c228c97 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
@@ -209,12 +209,12 @@ if __name__ == "__main__":
                  "--os_user {2} --jupyter_version {3} " \
                  "--scala_version {4} --spark_version {5} " \
                  "--hadoop_version {6} --region {7} " \
-                 "--r_mirror {8} --ip_address {9} --exploratory_name {10} --edge_ip {11}" \
+                 "--ip_address {8} --exploratory_name {9} --edge_ip {10}" \
             .format(instance_hostname, keyfile_name, notebook_config['datalab_ssh_user'],
                     os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
                     os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
-                    os.environ['azure_region'], os.environ['notebook_r_mirror'],
-                    notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
+                    os.environ['azure_region'], notebook_config['ip_address'],
+                    notebook_config['exploratory_name'], edge_hostname)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True, check=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
index 2e939b9..0f30b71 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
@@ -191,13 +191,13 @@ if __name__ == "__main__":
         params = "--hostname {0} --keyfile {1} " \
                  "--region {2} --spark_version {3} " \
                  "--hadoop_version {4} --os_user {5} " \
-                 "--scala_version {6} --r_mirror {7} " \
-                 "--ip_address {8} --exploratory_name {9} --edge_ip {10}".\
+                 "--scala_version {6} " \
+                 "--ip_address {7} --exploratory_name {8} --edge_ip {9}".\
             format(instance_hostname, keyfile_name,
                    os.environ['azure_region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], notebook_config['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
-                   notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
+                   os.environ['notebook_scala_version'], notebook_config['ip_address'],
+                   notebook_config['exploratory_name'], edge_hostname)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True, check=True)
             datalab.actions_lib.remount_azure_disk(True, notebook_config['datalab_ssh_user'], instance_hostname,
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
index 8c694e1..4d8fe90 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyterlab_configure.py
@@ -191,12 +191,12 @@ if __name__ == "__main__":
         params = "--hostname {0} --keyfile {1} " \
                  "--region {2} --spark_version {3} " \
                  "--hadoop_version {4} --os_user {5} " \
-                 "--scala_version {6} --r_mirror {7} " \
-                 "--ip_address {8} --exploratory_name {9} --edge_ip {10}".\
+                 "--scala_version {6} " \
+                 "--ip_address {7} --exploratory_name {8} --edge_ip {9}".\
             format(instance_hostname, keyfile_name,
                    os.environ['azure_region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], notebook_config['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
+                   os.environ['notebook_scala_version'],
                    notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_jupyterlab_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
index 4ed3ad7..bbf85e5 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
@@ -192,11 +192,11 @@ if __name__ == "__main__":
         params = "--hostname {0}  --keyfile {1} " \
                  "--region {2} --rstudio_pass {3} " \
                  "--rstudio_version {4} --os_user {5} " \
-                 "--r_mirror {6} --ip_address {7} --exploratory_name {8} --edge_ip {9} " \
+                 "--ip_address {6} --exploratory_name {7} --edge_ip {8} " \
             .format(instance_hostname, keyfile_name,
                     os.environ['azure_region'], notebook_config['rstudio_pass'],
                     os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
-                    os.environ['notebook_r_mirror'], notebook_config['ip_address'],
+                    notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_hostname)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
index b4a29b4..536955b 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
@@ -200,14 +200,14 @@ if __name__ == "__main__":
                  "--edge_hostname {8} --proxy_port {9} " \
                  "--zeppelin_version {10} --scala_version {11} " \
                  "--livy_version {12} --multiple_clusters {13} " \
-                 "--r_mirror {14} --endpoint_url {15} " \
-                 "--ip_address {16} --exploratory_name {17} --edge_ip {18} " \
+                 "--endpoint_url {14} " \
+                 "--ip_address {15} --exploratory_name {16} --edge_ip {17} " \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, os.environ['azure_region'],
                     json.dumps(additional_config), notebook_config['datalab_ssh_user'],
                     os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
                     edge_instance_private_hostname, '3128', os.environ['notebook_zeppelin_version'],
                     os.environ['notebook_scala_version'], os.environ['notebook_livy_version'],
-                    os.environ['notebook_multiple_clusters'], os.environ['notebook_r_mirror'], 'null',
+                    os.environ['notebook_multiple_clusters'], 'null',
                     notebook_config['ip_address'], notebook_config['exploratory_name'], edge_hostname)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
index 87e6bb2..5a33caa 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_configure.py
@@ -109,11 +109,10 @@ def configure_slave(slave_number, data_engine):
         logging.info('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
         print('[CONFIGURE SLAVE NODE {}]'.format(slave + 1))
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
-                 "--scala_version {} --r_mirror {} --master_ip {} --node_type {}". \
+                 "--scala_version {} --master_ip {} --node_type {}". \
             format(slave_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
-                   'slave')
+                   os.environ['notebook_scala_version'], master_node_hostname, 'slave')
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
         except:
@@ -299,10 +298,10 @@ if __name__ == "__main__":
         logging.info('[CONFIGURE MASTER NODE]')
         print('[CONFIGURE MASTER NODE]')
         params = "--hostname {} --keyfile {} --region {} --spark_version {} --hadoop_version {} --os_user {} " \
-                 "--scala_version {} --r_mirror {} --master_ip {} --node_type {}".\
+                 "--scala_version {} --master_ip {} --node_type {}".\
             format(master_node_hostname, keyfile_name, data_engine['region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], data_engine['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'], master_node_hostname,
+                   os.environ['notebook_scala_version'], master_node_hostname,
                    'master')
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_dataengine', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
index 3be0192..5f8fea4 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
@@ -162,11 +162,11 @@ if __name__ == "__main__":
                  "--os_user {} --jupyter_version {} " \
                  "--scala_version {} --spark_version {} " \
                  "--hadoop_version {} --region {} " \
-                 "--r_mirror {} --exploratory_name {} --edge_ip {}" \
+                 "--exploratory_name {} --edge_ip {}" \
             .format(instance_hostname, notebook_config['ssh_key_path'], notebook_config['datalab_ssh_user'],
                     os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
                     os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
-                    os.environ['gcp_region'], os.environ['notebook_r_mirror'],
+                    os.environ['gcp_region'],
                     notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_deep_learning_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
index 0ede3eb..e27f16b 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyter_configure.py
@@ -162,13 +162,13 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} " \
                  "--region {} --spark_version {} " \
                  "--hadoop_version {} --os_user {} " \
-                 "--scala_version {} --r_mirror {} " \
+                 "--scala_version {} " \
                  "--exploratory_name {} "\
                  "--edge_ip {}".\
             format(instance_hostname, notebook_config['ssh_key_path'],
                    os.environ['gcp_region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], notebook_config['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
+                   os.environ['notebook_scala_version'],
                    notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_jupyter_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
index 4207257..d1f1db1 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/jupyterlab_configure.py
@@ -162,13 +162,12 @@ if __name__ == "__main__":
         params = "--hostname {} --keyfile {} --edge_ip {} " \
                  "--region {} --spark_version {} " \
                  "--hadoop_version {} --os_user {} " \
-                 "--scala_version {} --r_mirror {} " \
+                 "--scala_version {} " \
                  "--exploratory_name {}".\
             format(instance_hostname, notebook_config['ssh_key_path'], edge_instance_private_ip,
                    os.environ['gcp_region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], notebook_config['datalab_ssh_user'],
-                   os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
-                   notebook_config['exploratory_name'], )
+                   os.environ['notebook_scala_version'], notebook_config['exploratory_name'], )
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_jupyterlab_node', params), shell=True, check=True)
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
index 8b51ca0..3991d50 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
@@ -166,11 +166,11 @@ if __name__ == "__main__":
         params = "--hostname {0}  --keyfile {1} " \
                  "--region {2} --rstudio_pass {3} " \
                  "--rstudio_version {4} --os_user {5} " \
-                 "--r_mirror {6} --ip_address {7} --exploratory_name {8} --edge_ip {9}" \
+                 "--ip_address {6} --exploratory_name {7} --edge_ip {8}" \
             .format(instance_hostname, notebook_config['ssh_key_path'],
                     os.environ['gcp_region'], notebook_config['rstudio_pass'],
                     os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
-                    os.environ['notebook_r_mirror'], notebook_config['ip_address'],
+                    notebook_config['ip_address'],
                     notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_rstudio_node', params), shell=True, check=True)
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
index 56ddd56..f201944 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/tensor-rstudio_configure.py
@@ -169,11 +169,11 @@ if __name__ == "__main__":
         params = "--hostname {}  --keyfile {} " \
                  "--region {} --rstudio_pass {} " \
                  "--rstudio_version {} --os_user {} " \
-                 "--r_mirror {} --exploratory_name {} --edge_ip {}" \
+                 "--exploratory_name {} --edge_ip {}" \
             .format(instance_hostname, notebook_config['ssh_key_path'],
                     os.environ['gcp_region'], notebook_config['rstudio_pass'],
                     os.environ['notebook_rstudio_version'], notebook_config['datalab_ssh_user'],
-                    os.environ['notebook_r_mirror'], notebook_config['exploratory_name'], edge_instance_private_ip)
+                    notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_tensor-rstudio_node', params), shell=True, check=True)
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
index c55f2e4..a9fe2b4 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
@@ -170,7 +170,7 @@ if __name__ == "__main__":
                  "--edge_hostname {} --proxy_port {} " \
                  "--zeppelin_version {} --scala_version {} " \
                  "--livy_version {} --multiple_clusters {} " \
-                 "--r_mirror {} --endpoint_url {} " \
+                 "--endpoint_url {} " \
                  "--exploratory_name {} " \
                  "--edge_ip {}" \
             .format(instance_hostname, notebook_config['instance_name'], notebook_config['ssh_key_path'],
@@ -178,8 +178,7 @@ if __name__ == "__main__":
                     os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'], edge_instance_name,
                     '3128', os.environ['notebook_zeppelin_version'], os.environ['notebook_scala_version'],
                     os.environ['notebook_livy_version'], os.environ['notebook_multiple_clusters'],
-                    os.environ['notebook_r_mirror'], 'null',
-                    notebook_config['exploratory_name'], edge_instance_private_ip)
+                    'null', notebook_config['exploratory_name'], edge_instance_private_ip)
         try:
             subprocess.run("~/scripts/{}.py {}".format('configure_zeppelin_node', params), shell=True, check=True)
         except:
diff --git a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
index 76c5fbf..3369ab2 100644
--- a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
+++ b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
@@ -36,7 +36,6 @@ parser.add_argument('--spark_version', type=str, default='')
 parser.add_argument('--hadoop_version', type=str, default='')
 parser.add_argument('--os_user', type=str, default='')
 parser.add_argument('--scala_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
 parser.add_argument('--ip_address', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 parser.add_argument('--edge_ip', type=str, default='')
@@ -93,7 +92,7 @@ if __name__ == "__main__":
     ensure_scala(scala_link, args.scala_version, args.os_user)
     if os.environ['notebook_r_enabled'] == 'true':
         print("Installing R")
-        ensure_r(args.os_user, r_libs, args.region, args.r_mirror)
+        ensure_r(args.os_user, r_libs)
     print("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
diff --git a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
index 928f868..e8b5862 100644
--- a/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
+++ b/infrastructure-provisioning/src/jupyterlab/scripts/configure_jupyterlab_node.py
@@ -37,7 +37,6 @@ parser.add_argument('--spark_version', type=str, default='')
 parser.add_argument('--hadoop_version', type=str, default='')
 parser.add_argument('--os_user', type=str, default='')
 parser.add_argument('--scala_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
 parser.add_argument('--ip_address', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 args = parser.parse_args()
diff --git a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
index df84c99..397a22b 100644
--- a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
+++ b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
@@ -39,7 +39,6 @@ parser.add_argument('--region', type=str, default='')
 parser.add_argument('--os_user', type=str, default='')
 parser.add_argument('--rstudio_pass', type=str, default='')
 parser.add_argument('--rstudio_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
 parser.add_argument('--ip_address', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 parser.add_argument('--edge_ip', type=str, default='')
@@ -86,7 +85,7 @@ if __name__ == "__main__":
     print("Install Java")
     ensure_jre_jdk(args.os_user)
     print("Install R")
-    ensure_r(args.os_user, r_libs, args.region, args.r_mirror)
+    ensure_r(args.os_user, r_libs)
     print("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
diff --git a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
index e3398a4..e1b7cf4 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
@@ -39,7 +39,6 @@ parser.add_argument('--region', type=str, default='')
 parser.add_argument('--os_user', type=str, default='')
 parser.add_argument('--rstudio_pass', type=str, default='')
 parser.add_argument('--rstudio_version', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
 parser.add_argument('--ip_address', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 parser.add_argument('--edge_ip', type=str, default='')
@@ -94,7 +93,7 @@ if __name__ == "__main__":
     print("Install Java")
     ensure_jre_jdk(args.os_user)
     print("Install R")
-    ensure_r(args.os_user, r_libs, args.region, args.r_mirror)
+    ensure_r(args.os_user, r_libs)
     print("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 
diff --git a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
index edbe969..896bc08 100644
--- a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
+++ b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
@@ -47,7 +47,6 @@ parser.add_argument('--proxy_port', type=str, default='')
 parser.add_argument('--scala_version', type=str, default='')
 parser.add_argument('--livy_version', type=str, default='')
 parser.add_argument('--multiple_clusters', type=str, default='')
-parser.add_argument('--r_mirror', type=str, default='')
 parser.add_argument('--endpoint_url', type=str, default='')
 parser.add_argument('--ip_address', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
@@ -226,7 +225,7 @@ if __name__ == "__main__":
     ensure_scala(scala_link, args.scala_version, args.os_user)
     if os.environ['notebook_r_enabled'] == 'true':
         print("Installing R")
-        ensure_r(args.os_user, r_libs, args.region, args.r_mirror)
+        ensure_r(args.os_user, r_libs)
     print("Install Python 3 modules")
     ensure_python3_libraries(args.os_user)
 

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@datalab.apache.org
For additional commands, e-mail: commits-help@datalab.apache.org