Posted to commits@dlab.apache.org by om...@apache.org on 2019/02/19 15:01:42 UTC

[incubator-dlab] branch EPMCDLAB-1186 updated: changed parameters

This is an automated email from the ASF dual-hosted git repository.

omartushevskyi pushed a commit to branch EPMCDLAB-1186
in repository https://gitbox.apache.org/repos/asf/incubator-dlab.git


The following commit(s) were added to refs/heads/EPMCDLAB-1186 by this push:
     new 180ea49  changed parameters
180ea49 is described below

commit 180ea495b20f9158b841c5f703d9a075e3e9c6b9
Author: Oleh Martushevskyi <Ol...@epam.com>
AuthorDate: Tue Feb 19 16:58:48 2019 +0200

    changed parameters
---
 infrastructure-provisioning/scripts/deploy_dlab.py |  34 +++---
 .../src/base/scripts/install_prerequisites.py      |  18 ++--
 .../scripts/configure_deep_learning_node.py        |   7 +-
 .../src/general/conf/dlab.ini                      |  32 +++---
 .../src/general/files/os/debian/sources.list       |  40 +++----
 .../src/general/lib/aws/actions_lib.py             |  19 ++--
 .../src/general/lib/os/debian/common_lib.py        |  11 +-
 .../src/general/lib/os/debian/edge_lib.py          |   9 +-
 .../src/general/lib/os/debian/notebook_lib.py      | 120 ++++++++-------------
 .../src/general/lib/os/debian/ssn_lib.py           |  59 ++++------
 .../src/general/lib/os/fab.py                      |  67 +++++-------
 .../src/general/scripts/aws/edge_prepare.py        |   2 +-
 .../jupyter_dataengine-service_create_configs.py   |  46 ++++----
 .../jupyter_install_dataengine-service_kernels.py  |  11 +-
 .../src/general/scripts/aws/ssn_create_endpoint.py |   2 +-
 .../zeppelin_dataengine-service_create_configs.py  |  13 +--
 .../zeppelin_install_dataengine-service_kernels.py |  14 +--
 .../general/scripts/os/common_clean_instance.py    |   2 +-
 .../os/deeplearning_dataengine_create_configs.py   |  17 ++-
 .../os/deeplearning_install_dataengine_kernels.py  |  14 +--
 .../os/jupyter_dataengine_create_configs.py        |  15 ++-
 .../os/jupyter_install_dataengine_kernels.py       |  13 +--
 .../os/rstudio_dataengine_create_configs.py        |  10 +-
 .../os/rstudio_install_dataengine_kernels.py       |  14 +--
 .../os/tensor-rstudio_dataengine_create_configs.py |  12 +--
 .../tensor-rstudio_install_dataengine_kernels.py   |  14 +--
 .../scripts/os/tensor_dataengine_create_configs.py |  17 ++-
 .../os/tensor_install_dataengine_kernels.py        |  14 +--
 .../os/zeppelin_dataengine_create_configs.py       |  19 ++--
 .../os/zeppelin_install_dataengine_kernels.py      |  14 +--
 .../src/general/templates/aws/Rprofile.site        |   2 +-
 .../src/jupyter/scripts/configure_jupyter_node.py  |  18 ++--
 .../src/rstudio/scripts/configure_rstudio_node.py  |   7 +-
 .../src/ssn/scripts/configure_docker.py            |   9 +-
 .../src/ssn/scripts/configure_ui.py                |  12 +--
 .../src/ssn/templates/settings.xml                 |   2 +-
 .../scripts/configure_tensor-rstudio_node.py       |   7 +-
 .../src/tensor/scripts/configure_tensor_node.py    |   7 +-
 .../zeppelin/scripts/configure_zeppelin_node.py    |  27 ++---
 39 files changed, 309 insertions(+), 461 deletions(-)
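
Taken together, the changes swap the previous presence check on 'local_repository_host' for an explicit string flag, local_repository_enabled, and make every local_repository_*_repo setting carry a complete URL instead of a bare name combined with a host and prefix. A minimal sketch of the new gating pattern, assuming the environment variables introduced by this commit are already exported (the helper name below is illustrative, not part of the codebase):

    import os

    def spark_download_link(spark_version, hadoop_version):
        # Public mirror used when no local repository is configured.
        link = ('https://archive.apache.org/dist/spark/spark-{0}/'
                'spark-{0}-bin-hadoop{1}.tgz'.format(spark_version, hadoop_version))
        # The flag is the string 'True'/'False'; the scripts expect it to be set.
        if os.environ.get('local_repository_enabled') == 'True':
            # *_repo values now hold full URLs, so no host/prefix composition is needed.
            link = '{0}/spark-{1}-bin-hadoop{2}.tgz'.format(
                os.environ['local_repository_packages_repo'], spark_version, hadoop_version)
        return link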

diff --git a/infrastructure-provisioning/scripts/deploy_dlab.py b/infrastructure-provisioning/scripts/deploy_dlab.py
index 8f3f98d..003b869 100644
--- a/infrastructure-provisioning/scripts/deploy_dlab.py
+++ b/infrastructure-provisioning/scripts/deploy_dlab.py
@@ -118,8 +118,9 @@ parser.add_argument('--ldap_ou', type=str, default='ou=People', help='Ldap organ
 parser.add_argument('--ldap_service_username', type=str, default='cn=service-user', help='Ldap service user name')
 parser.add_argument('--ldap_service_password', type=str, default='service-user-password',
                     help='Ldap password for admin user')
-parser.add_argument('--local_repository_host', type=str, default='', help='IP address or DNS name of DLab repository. '
-                                                                          'For example: 10.10.10.10')
+parser.add_argument('--local_repository_enabled', type=str, default='False', help='True - to use local repository. '
+                                                                                  'Otherwise - False')
+parser.add_argument('--local_repository_host', type=str, default='', help='IP address or DNS name of repository')
 parser.add_argument('--local_repository_cert_path', type=str, default='', help='Full path to cert')
 parser.add_argument('--local_repository_parent_proxy_host', type=str, default='', help='IP address or DNS name of '
                                                                                        'parent proxy')
@@ -127,34 +128,33 @@ parser.add_argument('--local_repository_parent_proxy_port', type=str, default='3
                                                                                            'parent proxy')
 parser.add_argument('--local_repository_nginx_proxy_host', type=str, default='', help='IP address or DNS name of '
                                                                                       'Nginx proxy')
-parser.add_argument('--local_repository_prefix', type=str, default='', help='Prefix of DLab repository')
-parser.add_argument('--local_repository_apt_bintray_repo', type=str, default='apt-bintray',
+parser.add_argument('--local_repository_apt_bintray_repo', type=str, default='',
                     help='Repository name for APT Bintray')
-parser.add_argument('--local_repository_apt_ubuntu_security_repo', type=str, default='apt-security',
+parser.add_argument('--local_repository_apt_ubuntu_security_repo', type=str, default='',
                     help='Repository name for APT security')
-parser.add_argument('--local_repository_apt_ubuntu_repo', type=str, default='apt-ubuntu',
+parser.add_argument('--local_repository_apt_ubuntu_repo', type=str, default='',
                     help='Repository name for APT')
-parser.add_argument('--local_repository_docker_internal_repo', type=str, default='docker-internal',
+parser.add_argument('--local_repository_docker_internal_repo', type=str, default='',
                     help='Internal repository name for Docker')
-parser.add_argument('--local_repository_docker_repo', type=str, default='docker',
+parser.add_argument('--local_repository_docker_repo', type=str, default='',
                     help='Repository name for Docker')
-parser.add_argument('--local_repository_jenkins_repo', type=str, default='jenkins',
+parser.add_argument('--local_repository_jenkins_repo', type=str, default='',
                     help='Repository name for Jenkins')
-parser.add_argument('--local_repository_maven_bintray_repo', type=str, default='maven-bintray',
+parser.add_argument('--local_repository_maven_bintray_repo', type=str, default='',
                     help='Repository_name for Maven bintray')
-parser.add_argument('--local_repository_maven_central_repo', type=str, default='maven-central',
+parser.add_argument('--local_repository_maven_central_repo', type=str, default='',
                     help='Repository name for Maven central')
-parser.add_argument('--local_repository_mongo_repo', type=str, default='mongo',
+parser.add_argument('--local_repository_mongo_repo', type=str, default='',
                     help='Repository name for Mongo')
-parser.add_argument('--local_repository_npm_repo', type=str, default='npm',
+parser.add_argument('--local_repository_npm_repo', type=str, default='',
                     help='Repository for NPM')
-parser.add_argument('--local_repository_packages_repo', type=str, default='packages',
+parser.add_argument('--local_repository_packages_repo', type=str, default='',
                     help='Repository name for packages')
-parser.add_argument('--local_repository_pypi_repo', type=str, default='pypi',
+parser.add_argument('--local_repository_pypi_repo', type=str, default='',
                     help='Repository name for PyPi')
-parser.add_argument('--local_repository_r_repo', type=str, default='r',
+parser.add_argument('--local_repository_r_repo', type=str, default='',
                     help='Repository name for R')
-parser.add_argument('--local_repository_rrutter_repo', type=str, default='rrutter',
+parser.add_argument('--local_repository_rrutter_repo', type=str, default='',
                     help='Repository name for Rrutter')
 parser.add_argument('--tags', type=str, default='Operation,ItemDescription', help='Column name in report file that '
                                                                                   'contains tags')
diff --git a/infrastructure-provisioning/src/base/scripts/install_prerequisites.py b/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
index b4f4db3..3886740 100644
--- a/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
+++ b/infrastructure-provisioning/src/base/scripts/install_prerequisites.py
@@ -40,13 +40,13 @@ parser.add_argument('--region', type=str, default='')
 args = parser.parse_args()
 
 
-def update_pip_repository_configuration(repository_host):
+def update_pip_repository_configuration(repository, repository_host):
     if not exists('/home/{}/pip_conf_update_ensured'.format(args.user)):
         sudo('touch /etc/pip.conf')
         sudo('echo "[global]" > /etc/pip.conf')
         sudo('echo "timeout = 600" >> /etc/pip.conf')
-        sudo('echo "index-url = https://{}/simple/" >> /etc/pip.conf'.format(repository_host))
-        sudo('echo "trusted-host = {}" >> /etc/pip.conf'.format(repository_host.split('/')[0]))
+        sudo('echo "index-url = {}/simple/" >> /etc/pip.conf'.format(repository))
+        sudo('echo "trusted-host = {}" >> /etc/pip.conf'.format(repository_host))
         sudo('touch /home/{}/pip_conf_update_ensured'.format(args.user))
 
 
@@ -59,15 +59,15 @@ if __name__ == "__main__":
 
     if args.region == 'cn-north-1':
         update_apt_repository_configuration('http://mirrors.aliyun.com/ubuntu/')
-        update_pip_repository_configuration(os.environ['conf_pypi_mirror'])
+        update_pip_repository_configuration('https://{}'.format(os.environ['conf_pypi_mirror']),
+                                            os.environ['conf_pypi_mirror'])
 
     if 'local_repository_cert_path' in os.environ:
         add_repository_cert()
-    if 'local_repository_host' in os.environ:
-        update_apt_repository_configuration(os.environ['local_repository_host'])
-        update_pip_repository_configuration('{}/{}/{}'.format(os.environ['local_repository_host'],
-                                                              os.environ['local_repository_prefix'],
-                                                              os.environ['local_repository_pypi_repo']))
+    if os.environ['local_repository_enabled'] == 'True':
+        update_apt_repository_configuration()
+        update_pip_repository_configuration('{}'.format(os.environ['local_repository_pypi_repo']),
+                                            os.environ['local_repository_host'])
 
     print("Updating hosts file")
     update_hosts_file(args.user)
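
With the new signature, update_pip_repository_configuration() receives the PyPI repository URL and the bare host separately. As a rough example, if local_repository_pypi_repo were https://repo.example.com/dlab/pypi and local_repository_host were repo.example.com (both placeholders), the generated /etc/pip.conf would read approximately:

    [global]
    timeout = 600
    index-url = https://repo.example.com/dlab/pypi/simple/
    trusted-host = repo.example.com
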
diff --git a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
index e0ef508..12f46cd 100644
--- a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
+++ b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
@@ -66,10 +66,9 @@ if args.region == 'cn-north-1':
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/spark-" + spark_version + \
                  "-bin-hadoop" + hadoop_version + ".tgz"
-if 'local_repository_host' in os.environ:
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        os.environ['local_repository_host'], spark_version, hadoop_version, os.environ['local_repository_prefix'],
-        os.environ['local_repository_packages_repo'])
+if os.environ['local_repository_enabled'] == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        os.environ['local_repository_packages_repo'], spark_version, hadoop_version)
 local_spark_path = '/opt/spark/'
 jars_dir = '/opt/jars/'
 files_dir = '/root/files/'
diff --git a/infrastructure-provisioning/src/general/conf/dlab.ini b/infrastructure-provisioning/src/general/conf/dlab.ini
index 28b5b3c..7e0e09d 100644
--- a/infrastructure-provisioning/src/general/conf/dlab.ini
+++ b/infrastructure-provisioning/src/general/conf/dlab.ini
@@ -327,6 +327,8 @@ nginx_version = 1.15.1
 
 #--- [local_repository] local repository settings ---#
 [local_repository]
+### True - to use local repository. Otherwise - False
+# enabled =
 ### IP address or DNS name of repository
 # host =
 ### Full path to repository certificate
@@ -337,34 +339,32 @@ nginx_version = 1.15.1
 # parent_proxy_port =
 ### Nginx reverse proxy host
 # nginx_proxy_host =
-### Repository prefix
-# prefix =
 ### Repository name for APT Bintray
-apt_bintray_repo = apt-bintray
+# apt_bintray_repo =
 ### Repository name for APT security
-apt_ubuntu_security_repo = apt-security
+# apt_ubuntu_security_repo =
 ### Repository name for APT
-apt_ubuntu_repo = apt-ubuntu
+# apt_ubuntu_repo =
 ### Internal repository name for Docker
-docker_internal_repo = docker-internal
+# docker_internal_repo =
 ### Repository name for Docker
-docker_repo = docker
+# docker_repo =
 ### Repository name for Jenkins
-jenkins_repo = jenkins
+# jenkins_repo =
 ### Repository_name for Maven bintray
-maven_bintray_repo = maven-bintray
+# maven_bintray_repo =
 ### Repository name for Maven central
-maven_central_repo = maven-central
+# maven_central_repo =
 ### Repository name for Mongo
-mongo_repo = mongo
+# mongo_repo =
 ### Repository for NPM
-npm_repo = npm
+# npm_repo =
 ### Repository name for packages
-packages_repo = packages
+# packages_repo =
 ### Repository name for PyPi
-pypi_repo = pypi
+# pypi_repo =
 ### Repository name for R
-r_repo = r
+# r_repo =
 ### Repository name for Rrutter
-rrutter_repo = rrutter
+# rrutter_repo =
 
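For illustration only, a populated [local_repository] section under the new layout might look like the snippet below; every value is a placeholder, and the code changes above imply that each *_repo key now holds a complete URL (scheme included) rather than a repository name resolved against host and prefix:

    [local_repository]
    enabled = True
    host = repo.example.com
    cert_path = /opt/dlab/certs/repository.crt
    nginx_proxy_host = nginx.example.com
    apt_ubuntu_repo = https://repo.example.com/dlab/apt-ubuntu
    apt_ubuntu_security_repo = https://repo.example.com/dlab/apt-security
    pypi_repo = https://repo.example.com/dlab/pypi
    packages_repo = https://repo.example.com/dlab/packages
    npm_repo = https://repo.example.com/dlab/npm
    r_repo = https://repo.example.com/dlab/r
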
diff --git a/infrastructure-provisioning/src/general/files/os/debian/sources.list b/infrastructure-provisioning/src/general/files/os/debian/sources.list
index ba22f09..1e6c0a1 100644
--- a/infrastructure-provisioning/src/general/files/os/debian/sources.list
+++ b/infrastructure-provisioning/src/general/files/os/debian/sources.list
@@ -18,46 +18,46 @@
 
 # See http://help.ubuntu.com/community/UpgradeNotes for how to upgrade to
 # newer versions of the distribution.
-deb https://REPOSITORY_UBUNTU xenial main restricted
-deb-src https://REPOSITORY_UBUNTU xenial main restricted
+deb REPOSITORY_UBUNTU xenial main restricted
+deb-src REPOSITORY_UBUNTU xenial main restricted
 
 ## Major bug fix updates produced after the final release of the
 ## distribution.
-deb https://REPOSITORY_UBUNTU xenial-updates main restricted
-deb-src https://REPOSITORY_UBUNTU xenial-updates main restricted
+deb REPOSITORY_UBUNTU xenial-updates main restricted
+deb-src REPOSITORY_UBUNTU xenial-updates main restricted
 
 ## N.B. software from this repository is ENTIRELY UNSUPPORTED by the Ubuntu
 ## team. Also, please note that software in universe WILL NOT receive any
 ## review or updates from the Ubuntu security team.
-deb https://REPOSITORY_UBUNTU xenial universe
-deb-src https://REPOSITORY_UBUNTU xenial universe
-deb https://REPOSITORY_UBUNTU xenial-updates universe
-deb-src https://REPOSITORY_UBUNTU xenial-updates universe
+deb REPOSITORY_UBUNTU xenial universe
+deb-src REPOSITORY_UBUNTU xenial universe
+deb REPOSITORY_UBUNTU xenial-updates universe
+deb-src REPOSITORY_UBUNTU xenial-updates universe
 
 ## N.B. software from this repository is ENTIRELY UNSUPPORTED by the Ubuntu
 ## team, and may not be under a free licence. Please satisfy yourself as to
 ## your rights to use the software. Also, please note that software in
 ## multiverse WILL NOT receive any review or updates from the Ubuntu
 ## security team.
-deb https://REPOSITORY_UBUNTU xenial multiverse
-deb-src https://REPOSITORY_UBUNTU xenial multiverse
-deb https://REPOSITORY_UBUNTU xenial-updates multiverse
-deb-src https://REPOSITORY_UBUNTU xenial-updates multiverse
+deb REPOSITORY_UBUNTU xenial multiverse
+deb-src REPOSITORY_UBUNTU xenial multiverse
+deb REPOSITORY_UBUNTU xenial-updates multiverse
+deb-src REPOSITORY_UBUNTU xenial-updates multiverse
 
 ## N.B. software from this repository may not have been tested as
 ## extensively as that contained in the main release, although it includes
 ## newer versions of some applications which may provide useful features.
 ## Also, please note that software in backports WILL NOT receive any review
 ## or updates from the Ubuntu security team.
-deb https://REPOSITORY_UBUNTU xenial-backports main restricted universe multiverse
-deb-src https://REPOSITORY_UBUNTU xenial-backports main restricted universe multiverse
+deb REPOSITORY_UBUNTU xenial-backports main restricted universe multiverse
+deb-src REPOSITORY_UBUNTU xenial-backports main restricted universe multiverse
 
-deb https://REPOSITORY_SECURITY_UBUNTU xenial-security main restricted
-deb-src https://REPOSITORY_SECURITY_UBUNTU xenial-security main restricted
-deb https://REPOSITORY_SECURITY_UBUNTU xenial-security universe
-deb-src https://REPOSITORY_SECURITY_UBUNTU xenial-security universe
-deb https://REPOSITORY_SECURITY_UBUNTU xenial-security multiverse
-deb-src https://REPOSITORY_SECURITY_UBUNTU xenial-security multiverse
+deb REPOSITORY_SECURITY_UBUNTU xenial-security main restricted
+deb-src REPOSITORY_SECURITY_UBUNTU xenial-security main restricted
+deb REPOSITORY_SECURITY_UBUNTU xenial-security universe
+deb-src REPOSITORY_SECURITY_UBUNTU xenial-security universe
+deb REPOSITORY_SECURITY_UBUNTU xenial-security multiverse
+deb-src REPOSITORY_SECURITY_UBUNTU xenial-security multiverse
 
 ## Uncomment the following two lines to add software from Canonical's
 ## 'partner' repository.
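
Because the scheme is no longer hard-coded in the template, the REPOSITORY_UBUNTU and REPOSITORY_SECURITY_UBUNTU placeholders are now substituted by common_lib.py with full repository URLs plus a trailing slash. A rendered pair of lines would look roughly like this, with placeholder URLs:

    deb https://repo.example.com/dlab/apt-ubuntu/ xenial main restricted
    deb https://repo.example.com/dlab/apt-security/ xenial-security main restricted
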
diff --git a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
index 9e2d037..35c8b3d 100644
--- a/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
+++ b/infrastructure-provisioning/src/general/lib/aws/actions_lib.py
@@ -1511,18 +1511,13 @@ def ensure_local_jars(os_user, jars_dir):
     if not exists('/home/{}/.ensure_dir/local_jars_ensured'.format(os_user)):
         try:
             sudo('mkdir -p {0}'.format(jars_dir))
-            if 'local_repository_host' in os.environ:
-                sudo('wget https://{2}/{3}/{4}/hadoop-aws-{0}.jar -O {1}hadoop-aws-{0}.jar'.format(
-                    '2.7.4', jars_dir, os.environ['local_repository_host'], os.environ['local_repository_prefix'],
-                    os.environ['local_repository_packages_repo']))
-                sudo('wget https://{2}/{3}/{4}/aws-java-sdk-{0}.jar -O '
-                     '{1}aws-java-sdk-{0}.jar'.format('1.7.4', jars_dir, os.environ['local_repository_host'],
-                                                      os.environ['local_repository_prefix'],
-                                                      os.environ['local_repository_packages_repo']))
-                sudo('wget https://{2}/{3}/{4}/hadoop-lzo-{0}.jar -O '
-                     '{1}hadoop-lzo-{0}.jar'.format('0.4.20', jars_dir, os.environ['local_repository_host'],
-                                                    os.environ['local_repository_prefix'],
-                                                    os.environ['local_repository_packages_repo']))
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('wget {2}/hadoop-aws-{0}.jar -O {1}hadoop-aws-{0}.jar'.format(
+                    '2.7.4', jars_dir, os.environ['local_repository_packages_repo']))
+                sudo('wget {2}/aws-java-sdk-{0}.jar -O '
+                     '{1}aws-java-sdk-{0}.jar'.format('1.7.4', jars_dir, os.environ['local_repository_packages_repo']))
+                sudo('wget {2}/hadoop-lzo-{0}.jar -O '
+                     '{1}hadoop-lzo-{0}.jar'.format('0.4.20', jars_dir, os.environ['local_repository_packages_repo']))
             else:
                 sudo('wget https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/{0}/hadoop-aws-{0}.jar -O \
                                         {1}hadoop-aws-{0}.jar'.format('2.7.4', jars_dir))
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
index bb4a7f7..2c051a7 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
@@ -54,15 +54,14 @@ def renew_gpg_key():
         sys.exit(1)
 
 
-def update_apt_repository_configuration(repository_host):
+def update_apt_repository_configuration(repository_host=''):
     if not exists('/tmp/apt_conf_update_ensured'):
         put('/root/files/sources.list', '/tmp/sources.list')
         sudo('mv /tmp/sources.list /etc/apt/sources.list')
-        if 'local_repository_host' in os.environ:
-            sudo('sed -i "s|REPOSITORY_UBUNTU|{0}/{1}/{2}/|g" /etc/apt/sources.list'.format(
-                repository_host, os.environ['local_repository_prefix'], os.environ['local_repository_apt_ubuntu_repo']))
-            sudo('sed -i "s|REPOSITORY_SECURITY_UBUNTU|{0}/{1}/{2}/|g" /etc/apt/sources.list'.format(
-                repository_host, os.environ['local_repository_prefix'],
+        if os.environ['local_repository_enabled'] == 'True':
+            sudo('sed -i "s|REPOSITORY_UBUNTU|{0}/|g" /etc/apt/sources.list'.format(
+                 os.environ['local_repository_apt_ubuntu_repo']))
+            sudo('sed -i "s|REPOSITORY_SECURITY_UBUNTU|{0}/|g" /etc/apt/sources.list'.format(
                 os.environ['local_repository_apt_ubuntu_security_repo']))
         else:
             sudo('sed -i "s|REPOSITORY_UBUNTU|{}|g" /etc/apt/sources.list'.format(repository_host))
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py
index e31b969..96c9b33 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/edge_lib.py
@@ -40,7 +40,7 @@ def configure_http_proxy_server(config):
             sudo('sed -i "s|LDAP_SERVICE_PASSWORD|{}|g" /etc/squid/squid.conf'.format(config['ldap_password']))
             sudo('sed -i "s|LDAP_AUTH_PATH|{}|g" /etc/squid/squid.conf'.format('/usr/lib/squid/basic_ldap_auth'))
             replace_string = ''
-            if 'local_repository_host' in os.environ:
+            if os.environ['local_repository_enabled'] == 'True':
                 config['vpc_cidrs'].append('{}/32'.format(os.environ['local_repository_host']))
                 config['vpc_cidrs'].append('{}/32'.format(os.environ['local_repository_parent_proxy_host']))
                 config['vpc_cidrs'].append('{}/32'.format(os.environ['local_repository_nginx_proxy_host']))
@@ -84,10 +84,9 @@ def install_nginx_ldap(edge_ip, nginx_version, ldap_ip, ldap_dn, ldap_ou, ldap_s
                 sudo('git clone https://github.com/kvspb/nginx-auth-ldap.git')
             sudo('mkdir -p /tmp/src')
             with cd('/tmp/src/'):
-                if 'local_repository_host' in os.environ:
-                    sudo('wget https://{0}/{2}/{3}/nginx-{1}.tar.gz'.format(
-                        os.environ['local_repository_host'], nginx_version, os.environ['local_repository_prefix'],
-                        os.environ['local_repository_packages_repo']))
+                if os.environ['local_repository_enabled'] == 'True':
+                    sudo('wget {0}/nginx-{1}.tar.gz'.format(
+                        os.environ['local_repository_packages_repo'], nginx_version))
                 else:
                     sudo('wget http://nginx.org/download/nginx-{}.tar.gz'.format(nginx_version))
                 sudo('tar -xzf nginx-{}.tar.gz'.format(nginx_version))
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
index 0e2d095..5a5bc5a 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
@@ -61,7 +61,7 @@ def ensure_r_local_kernel(spark_version, os_user, templates_dir, kernels_dir):
             sudo('\cp -f /tmp/r_template.json {}/ir/kernel.json'.format(kernels_dir))
             sudo('ln -s /opt/spark/ /usr/local/spark')
             try:
-                if 'local_repository_host' in os.environ:
+                if os.environ['local_repository_enabled'] == 'True':
                     sudo('cd /usr/local/spark/R/lib/SparkR; R -e "install.packages(\'roxygen2\')" '
                          'R -e "devtools::check(\'.\')"')
                 else:
@@ -79,11 +79,9 @@ def ensure_r_local_kernel(spark_version, os_user, templates_dir, kernels_dir):
 @backoff.on_exception(backoff.expo, SystemExit, max_tries=20)
 def add_marruter_key():
     try:
-        if 'local_repository_host' in os.environ:
-            sudo('echo "deb [trusted=yes] https://{0}/{1}/{2} xenial main" >> '
-                 '/etc/apt/sources.list'.format(os.environ['local_repository_host'],
-                                                os.environ['local_repository_prefix'],
-                                                os.environ['local_repository_rrutter_repo']))
+        if os.environ['local_repository_enabled'] == 'True':
+            sudo('echo "deb [trusted=yes] {0} xenial main" >> '
+                 '/etc/apt/sources.list'.format(os.environ['local_repository_rrutter_repo']))
         else:
             sudo('add-apt-repository -y ppa:marutter/rrutter')
     except:
@@ -97,10 +95,8 @@ def ensure_r(os_user, r_libs, region, r_mirror):
                 r_repository = r_mirror
             else:
                 r_repository = 'http://cran.us.r-project.org'
-            if 'local_repository_host' in os.environ:
-                r_repository = 'https://{0}/{1}/{2}/'.format(os.environ['local_repository_host'],
-                                                             os.environ['local_repository_prefix'],
-                                                             os.environ['local_repository_r_repo'])
+            if os.environ['local_repository_enabled'] == 'True':
+                r_repository = os.environ['local_repository_r_repo']
                 put('/root/templates/Rprofile.site', '/tmp/Rprofile.site')
             add_marruter_key()
             sudo('apt update')
@@ -110,11 +106,7 @@ def ensure_r(os_user, r_libs, region, r_mirror):
                 sudo('apt-get install -y r-base r-base-dev')
             except:
                 sudo('apt-get install -y r-base r-base-dev')
-            if 'local_repository_host' in os.environ:
-                sudo('sed -i "s/REPOSITORY_HOST/{0}/g" /tmp/Rprofile.site'.format(
-                    os.environ['local_repository_host']))
-                sudo('sed -i "s/REPOSITORY_PREFIX/{0}/g" /tmp/Rprofile.site'.format(
-                    os.environ['local_repository_prefix']))
+            if os.environ['local_repository_enabled'] == 'True':
                 sudo('sed -i "s/R_REPO/{0}/g" /tmp/Rprofile.site'.format(
                     os.environ['local_repository_r_repo']))
                 sudo('cp -f /tmp/Rprofile.site /etc/R/')
@@ -145,10 +137,9 @@ def install_rstudio(os_user, local_spark_path, rstudio_pass, rstudio_version):
         try:
             sudo('apt-get install -y r-base')
             sudo('apt-get install -y gdebi-core')
-            if 'local_repository_host' in os.environ:
-                sudo('wget https://{0}/{2}/{3}/rstudio-server-{1}-amd64.deb'.format(
-                    os.environ['local_repository_host'], rstudio_version, os.environ['local_repository_prefix'],
-                    os.environ['local_repository_packages_repo']))
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('wget {0}/rstudio-server-{1}-amd64.deb'.format(
+                    os.environ['local_repository_packages_repo'], rstudio_version))
             else:
                 sudo('wget https://download2.rstudio.org/rstudio-server-{}-amd64.deb'.format(rstudio_version))
             sudo('gdebi -n rstudio-server-{}-amd64.deb'.format(rstudio_version))
@@ -166,7 +157,7 @@ def install_rstudio(os_user, local_spark_path, rstudio_pass, rstudio_version):
             sudo('chown {0}:{0} /home/{0}/.Rprofile'.format(os_user))
             sudo('''echo 'library(SparkR, lib.loc = c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib")))' >> '''
                  '''/home/{}/.Rprofile'''.format(os_user))
-            if 'local_repository_host' not in os.environ:
+            if os.environ['local_repository_enabled'] == 'False':
                 http_proxy = run('echo $http_proxy')
                 https_proxy = run('echo $https_proxy')
                 sudo('''echo 'Sys.setenv(http_proxy = \"{}\")' >> /home/{}/.Rprofile'''.format(http_proxy, os_user))
@@ -209,10 +200,9 @@ def ensure_sbt(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/sbt_ensured'):
         try:
             sudo('apt-get install -y apt-transport-https')
-            if 'local_repository_host' in os.environ:
-                sudo('echo "deb [trusted=yes] https://{0}/{1}/{2} /" | '
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('echo "deb [trusted=yes] {0} /" | '
                      'sudo tee -a /etc/apt/sources.list.d/sbt.list'.format(
-                      os.environ['local_repository_host'], os.environ['local_repository_prefix'],
                       os.environ['local_repository_apt_bintray_repo']))
             else:
                 sudo('echo "deb https://dl.bintray.com/sbt/debian /" | sudo tee -a /etc/apt/sources.list.d/sbt.list')
@@ -266,10 +256,9 @@ def ensure_python3_specific_version(python3_version, os_user):
         try:
             if len(python3_version) < 4:
                 python3_version = python3_version + ".0"
-            if 'local_repository_host' in os.environ:
-                sudo('wget https://{1}/{2}/{3}/Python-{0}.tgz'.format(
-                     python3_version, os.environ['local_repository_host'], os.environ['local_repository_prefix'],
-                     os.environ['local_repository_packages_repo']))
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('wget {1}/Python-{0}.tgz'.format(
+                     python3_version, os.environ['local_repository_packages_repo']))
             else:
                 sudo('wget https://www.python.org/ftp/python/{0}/Python-{0}.tgz'.format(python3_version))
             sudo('tar xzf Python-{0}.tgz; cd Python-{0}; ./configure --prefix=/usr/local; make altinstall'.format(
@@ -340,11 +329,9 @@ def install_tensor(os_user, cuda_version, cuda_file_name,
                 sudo('if [[ $(apt-cache search linux-image-extra-`uname -r`) ]]; then apt-get -y '
                      'install linux-image-extra-`uname -r`; else apt-get -y install '
                      'linux-modules-extra-`uname -r`; fi;')
-            if 'local_repository_host' in os.environ:
-                sudo('wget https://{2}/{3}/{4}/NVIDIA-Linux-x86_64-{0}.run -O '
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('wget {2}/NVIDIA-Linux-x86_64-{0}.run -O '
                      '/home/{1}/NVIDIA-Linux-x86_64-{0}.run'.format(nvidia_version, os_user,
-                                                                    os.environ['local_repository_host'],
-                                                                    os.environ['local_repository_prefix'],
                                                                     os.environ['local_repository_packages_repo']))
             else:
                 sudo('wget http://us.download.nvidia.com/XFree86/Linux-x86_64/{0}/NVIDIA-Linux-x86_64-{0}.run -O '
@@ -354,10 +341,9 @@ def install_tensor(os_user, cuda_version, cuda_file_name,
             # install cuda
             sudo('python3.5 -m pip install --upgrade pip=={0} wheel numpy=={1} --no-cache-dir'. format(
                 os.environ['conf_pip_version'], os.environ['notebook_numpy_version']))
-            if 'local_repository_host' in os.environ:
-                sudo('wget -P /opt https://{1}/{2}/{3}/{0}'.format(
-                     cuda_file_name, os.environ['local_repository_host'], os.environ['local_repository_prefix'],
-                     os.environ['local_repository_packages_repo']))
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('wget -P /opt {1}/{0}'.format(
+                     cuda_file_name, os.environ['local_repository_packages_repo']))
             else:
                 sudo('wget -P /opt https://developer.nvidia.com/compute/cuda/{0}/prod/local_installers/{1}'.format(
                     cuda_version, cuda_file_name))
@@ -366,10 +352,9 @@ def install_tensor(os_user, cuda_version, cuda_file_name,
             sudo('ln -s /opt/cuda-{0} /usr/local/cuda-{0}'.format(cuda_version))
             sudo('rm -f /opt/{}'.format(cuda_file_name))
             # install cuDNN
-            if 'local_repository_host' in os.environ:
-                run('wget https://{0}/{2}/{3}/{1} -O /tmp/{1}'.format(
-                    os.environ['local_repository_host'], cudnn_file_name, os.environ['local_repository_prefix'],
-                    os.environ['local_repository_packages_repo']))
+            if os.environ['local_repository_enabled'] == 'True':
+                run('wget {0}/{1} -O /tmp/{1}'.format(
+                    os.environ['local_repository_packages_repo'], cudnn_file_name))
             else:
                 run('wget http://developer.download.nvidia.com/compute/redist/cudnn/v{0}/{1} -O /tmp/{1}'.format(
                     cudnn_version, cudnn_file_name))
@@ -382,17 +367,13 @@ def install_tensor(os_user, cuda_version, cuda_file_name,
             run('echo "export LD_LIBRARY_PATH=\"$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64\"" >> '
                 '~/.bashrc')
             # install TensorFlow and run TensorBoard
-            if 'local_repository_host' in os.environ:
-                sudo('python2.7 -m pip install --upgrade https://{0}/{2}/{3}/tensorflow_gpu-{1}-'
-                     'cp27-none-linux_x86_64.whl --no-cache-dir'.format(os.environ['local_repository_host'],
-                                                                        tensorflow_version,
-                                                                        os.environ['local_repository_prefix'],
-                                                                        os.environ['local_repository_packages_repo']))
-                sudo('python3 -m pip install --upgrade https://{0}/{2}/{3}/tensorflow_gpu-{1}-'
-                     'cp35-cp35m-linux_x86_64.whl --no-cache-dir'.format(os.environ['local_repository_host'],
-                                                                         tensorflow_version,
-                                                                         os.environ['local_repository_prefix'],
-                                                                         os.environ['local_repository_packages_repo']))
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('python2.7 -m pip install --upgrade {0}/tensorflow_gpu-{1}-'
+                     'cp27-none-linux_x86_64.whl --no-cache-dir'.format(os.environ['local_repository_packages_repo'],
+                                                                        tensorflow_version))
+                sudo('python3 -m pip install --upgrade {0}/tensorflow_gpu-{1}-'
+                     'cp35-cp35m-linux_x86_64.whl --no-cache-dir'.format(os.environ['local_repository_packages_repo'],
+                                                                         tensorflow_version))
             else:
                 sudo('python2.7 -m pip install --upgrade https://storage.googleapis.com/tensorflow/linux/gpu/'
                      'tensorflow_gpu-{}-cp27-none-linux_x86_64.whl --no-cache-dir'.format(tensorflow_version))
@@ -441,30 +422,26 @@ def install_livy_dependencies_emr(os_user):
 
 def install_nodejs(os_user):
     if not exists('/home/{}/.ensure_dir/nodejs_ensured'.format(os_user)):
-        if 'local_repository_host' in os.environ:
-            sudo('wget https://{0}/{1}/{2}/node-v8.15.0.tar.gz'.format(
-                os.environ['local_repository_host'], os.environ['local_repository_prefix'],
+        if os.environ['local_repository_enabled'] == 'True':
+            sudo('wget {0}/node-v8.15.0.tar.gz'.format(
                 os.environ['local_repository_packages_repo']))
             sudo('tar zxvf node-v8.15.0.tar.gz')
             sudo('mv node-v8.15.0 /opt/node')
             with cd('/opt/node/'):
                 sudo('./configure')
                 sudo('make -j4')
-                sudo('wget https://{0}/{1}/{2}/linux-x64-57_binding.node'.format(
-                     os.environ['local_repository_host'], os.environ['local_repository_prefix'],
+                sudo('wget {0}/linux-x64-57_binding.node'.format(
                      os.environ['local_repository_packages_repo']))
                 sudo('echo "export PATH=$PATH:/opt/node" >> /etc/profile')
                 sudo('source /etc/profile')
                 sudo('./deps/npm/bin/npm-cli.js config set strict-ssl false')
                 sudo('./deps/npm/bin/npm-cli.js config set sass_binary_path /opt/node/linux-x64-57_binding.node')
-                sudo('./deps/npm/bin/npm-cli.js config set registry https://{0}/{1}/{2}/'.format(
-                     os.environ['local_repository_host'], os.environ['local_repository_prefix'],
+                sudo('./deps/npm/bin/npm-cli.js config set registry {0}/'.format(
                      os.environ['local_repository_npm_repo']))
                 sudo('./deps/npm/bin/npm-cli.js install npm')
                 sudo('cp deps/npm/bin/npm /opt/node/')
                 sudo('npm config set strict-ssl false')
-                sudo('npm config set registry https://{0}/{1}/{2}/'.format(
-                     os.environ['local_repository_prefix'], os.environ['local_repository_prefix'],
+                sudo('npm config set registry {0}/'.format(
                      os.environ['local_repository_npm_repo']))
                 sudo('npm config set sass_binary_path /opt/node/linux-x64-57_binding.node')
         else:
@@ -537,7 +514,7 @@ def install_caffe(os_user, region, caffe_version):
         sudo('git clone https://github.com/BVLC/caffe.git')
         with cd('/home/{}/caffe/'.format(os_user)):
             sudo('git checkout {}'.format(caffe_version))
-            if 'local_repository_host' in os.environ:
+            if os.environ['local_repository_enabled'] == 'True':
                 sudo('pip2 install matplotlib==2.0.2 --no-cache-dir')
                 sudo('pip3 install matplotlib==2.0.2 --no-cache-dir')
             sudo('pip2 install -r python/requirements.txt --no-cache-dir')
@@ -581,7 +558,7 @@ def install_caffe2(os_user, caffe2_version, cmake_version):
         sudo('apt-get install -y --no-install-recommends libgflags-dev')
         sudo('apt-get install -y --no-install-recommends libgtest-dev libiomp-dev libleveldb-dev liblmdb-dev '
              'libopencv-dev libopenmpi-dev libsnappy-dev openmpi-bin openmpi-doc python-pydot')
-        if 'local_repository_host' in os.environ:
+        if os.environ['local_repository_enabled'] == 'True':
             sudo('pip2 install jupyter-console=={} --no-cache-dir'.format(
                 os.environ['notebook_jupyter_console_version']))
         sudo('pip2 install flask graphviz hypothesis jupyter matplotlib==2.0.2 pydot python-nvd3 pyyaml requests '
@@ -590,10 +567,9 @@ def install_caffe2(os_user, caffe2_version, cmake_version):
              'scikit-image scipy setuptools tornado --no-cache-dir')
         sudo('cp -f /opt/cudnn/include/* /opt/cuda-8.0/include/')
         sudo('cp -f /opt/cudnn/lib64/* /opt/cuda-8.0/lib64/')
-        if 'local_repository_host' in os.environ:
-            sudo('wget https://{2}/{3}/{4}/cmake-{1}.tar.gz -O /home/{0}/cmake-{1}.tar.gz'.format(
-                 os_user, cmake_version, os.environ['local_repository_host'], os.environ['local_repository_prefix'],
-                 os.environ['local_repository_packages_repo']))
+        if os.environ['local_repository_enabled'] == 'True':
+            sudo('wget {2}/cmake-{1}.tar.gz -O /home/{0}/cmake-{1}.tar.gz'.format(
+                 os_user, cmake_version, os.environ['local_repository_packages_repo']))
         else:
             sudo('wget https://cmake.org/files/v{2}/cmake-{1}.tar.gz -O /home/{0}/cmake-{1}.tar.gz'.format(
                 os_user, cmake_version, cmake_version.split('.')[0] + "." + cmake_version.split('.')[1]))
@@ -613,15 +589,11 @@ def install_caffe2(os_user, caffe2_version, cmake_version):
 
 def install_cntk(os_user, cntk_version):
     if not exists('/home/{}/.ensure_dir/cntk_ensured'.format(os_user)):
-        if 'local_repository_host' in os.environ:
-            sudo('pip2 install https://{1}/{2}/{3}/cntk-{0}-cp27-cp27mu-linux_x86_64.whl '
-                 '--no-cache-dir'.format(cntk_version, os.environ['local_repository_host'],
-                                         os.environ['local_repository_prefix'],
-                                         os.environ['local_repository_packages_repo']))
-            sudo('pip3 install https://{1}/{2}/{3}/cntk-{0}-cp35-cp35m-linux_x86_64.whl '
-                 '--no-cache-dir'.format(cntk_version, os.environ['local_repository_host'],
-                                         os.environ['local_repository_prefix'],
-                                         os.environ['local_repository_packages_repo']
+        if os.environ['local_repository_enabled'] == 'True':
+            sudo('pip2 install {1}/cntk-{0}-cp27-cp27mu-linux_x86_64.whl '
+                 '--no-cache-dir'.format(cntk_version, os.environ['local_repository_packages_repo']))
+            sudo('pip3 install {1}/cntk-{0}-cp35-cp35m-linux_x86_64.whl '
+                 '--no-cache-dir'.format(cntk_version, os.environ['local_repository_packages_repo']
                                          ))
         else:
             sudo('pip2 install https://cntk.ai/PythonWheel/GPU/cntk-{}-cp27-cp27mu-linux_x86_64.whl '
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
index d84daea..fb8f519 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
@@ -32,14 +32,11 @@ def ensure_docker_daemon(dlab_path, os_user, region):
     try:
         if not exists(dlab_path + 'tmp/docker_daemon_ensured'):
             docker_version = os.environ['ssn_docker_version']
-            if 'local_repository_host' in os.environ:
-                sudo('curl -fsSL https://{0}/{1}/{2}/gpg | apt-key add -'.format(
-                    os.environ['local_repository_host'], os.environ['local_repository_prefix'],
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('curl -fsSL {0}/gpg | apt-key add -'.format(
                     os.environ['local_repository_docker_repo']))
-                sudo('add-apt-repository "deb [arch=amd64] https://{0}/{1}/{2}/ $(lsb_release -cs) \
-                                  stable"'.format(os.environ['local_repository_host'],
-                                                  os.environ['local_repository_prefix'],
-                                                  os.environ['local_repository_docker_repo']))
+                sudo('add-apt-repository "deb [arch=amd64] {0}/ $(lsb_release -cs) \
+                                  stable"'.format(os.environ['local_repository_docker_repo']))
             else:
                 sudo('curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -')
                 sudo('add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) \
@@ -73,15 +70,11 @@ def ensure_nginx(dlab_path):
 def ensure_jenkins(dlab_path):
     try:
         if not exists(dlab_path + 'tmp/jenkins_ensured'):
-            if 'local_repository_host' in os.environ:
-                sudo('wget -q -O - https://{0}/{1}/{2}/jenkins-ci.org.key'
-                     ' | apt-key add -'.format(os.environ['local_repository_host'],
-                                               os.environ['local_repository_prefix'],
-                                               os.environ['local_repository_packages_repo']))
-                sudo('echo deb https://{0}/{1}/{2}/ binary/ > '
-                     '/etc/apt/sources.list.d/jenkins.list'.format(os.environ['local_repository_host'],
-                                                                   os.environ['local_repository_prefix'],
-                                                                   os.environ['local_repository_jenkins_repo']))
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('wget -q -O - {0}/jenkins-ci.org.key'
+                     ' | apt-key add -'.format(os.environ['local_repository_packages_repo']))
+                sudo('echo deb {0}/ binary/ > '
+                     '/etc/apt/sources.list.d/jenkins.list'.format(os.environ['local_repository_jenkins_repo']))
             else:
                 sudo('wget -q -O - https://pkg.jenkins.io/debian/jenkins-ci.org.key | apt-key add -')
                 sudo('echo deb http://pkg.jenkins.io/debian-stable binary/ > /etc/apt/sources.list.d/jenkins.list')
@@ -172,11 +165,10 @@ def ensure_supervisor():
 def ensure_mongo():
     try:
         if not exists(os.environ['ssn_dlab_path'] + 'tmp/mongo_ensured'):
-            if 'local_repository_host' in os.environ:
-                sudo('ver=`lsb_release -cs`; echo "deb https://{0}/{1}/{2}/ '
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('ver=`lsb_release -cs`; echo "deb {0}/ '
                      '$ver/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list; '
-                     'apt-get update'.format(os.environ['local_repository_host'], os.environ['local_repository_prefix'],
-                                             os.environ['local_repository_mongo_repo']))
+                     'apt-get update'.format(os.environ['local_repository_mongo_repo']))
             else:
                 sudo('apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv EA312927')
                 sudo('ver=`lsb_release -cs`; echo "deb http://repo.mongodb.org/apt/ubuntu $ver/mongodb-org/3.2 '
@@ -207,7 +199,7 @@ def start_ss(keyfile, host_string, dlab_conf_dir, web_path,
             local('sed -i "s|KEYSTORE_PASSWORD|{}|g" /root/templates/ssn.yml'.format(keystore_passwd))
             local('sed -i "s|CLOUD_PROVIDER|{}|g" /root/templates/ssn.yml'.format(cloud_provider))
             local('sed -i "s|\${JRE_HOME}|' + java_path + '|g" /root/templates/ssn.yml')
-            if 'local_repository_host' in os.environ:
+            if os.environ['local_repository_enabled'] == 'True':
                 local('sed -i "s|LOCAL_REPO_ENABLED|true|g" /root/templates/ssn.yml')
             else:
                 local('sed -i "s|LOCAL_REPO_ENABLED|false|g" /root/templates/ssn.yml')
@@ -344,16 +336,11 @@ def install_build_dep():
             maven_version = '3.5.4'
             sudo('apt-get install -y openjdk-8-jdk git wget unzip gcc g++ make')
             with cd('/opt/'):
-                if 'local_repository_host' in os.environ:
-                    sudo('wget https://{0}/{2}/{3}/apache-maven-{1}-bin.zip'.format(
-                         os.environ['local_repository_host'], maven_version, os.environ['local_repository_prefix'],
-                         os.environ['local_repository_packages_repo']))
+                if os.environ['local_repository_enabled'] == 'True':
+                    sudo('wget {0}/apache-maven-{1}-bin.zip'.format(
+                         os.environ['local_repository_packages_repo'], maven_version))
                     sudo('unzip apache-maven-{}-bin.zip'.format(maven_version))
                     put('templates/settings.xml', '/tmp/settings.xml')
-                    sudo('sed -i "s|REPOSITORY_HOST|{}|g" /tmp/settings.xml'.format(
-                        os.environ['local_repository_host']))
-                    sudo('sed -i "s|REPOSITORY_PREFIX|{}|g" /tmp/settings.xml'.format(
-                        os.environ['local_repository_prefix']))
                     sudo('sed -i "s|REPOSITORY_MAVEN_REPO|{}|g" /tmp/settings.xml'.format(
                         os.environ['local_repository_maven_central_repo']))
                     sudo('cp -f /tmp/settings.xml apache-maven-{}/conf/'.format(maven_version))
@@ -363,30 +350,26 @@ def install_build_dep():
                         '{1}-bin.zip'.format(maven_version.split('.')[0], maven_version))
                     sudo('unzip apache-maven-{}-bin.zip'.format(maven_version))
                 sudo('mv apache-maven-{} maven'.format(maven_version))
-            if 'local_repository_host' in os.environ:
-                sudo('wget https://{0}/{1}/{2}/node-v8.15.0.tar.gz'.format(
-                     os.environ['local_repository_host'], os.environ['local_repository_prefix'],
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('wget {0}/node-v8.15.0.tar.gz'.format(
                      os.environ['local_repository_packages_repo']))
                 sudo('tar zxvf node-v8.15.0.tar.gz')
                 sudo('mv node-v8.15.0 /opt/node')
                 with cd('/opt/node/'):
                     sudo('./configure')
                     sudo('make -j4')
-                    sudo('wget https://{0}/{1}/{2}/linux-x64-57_binding.node'.format(
-                         os.environ['local_repository_host'], os.environ['local_repository_prefix'],
+                    sudo('wget {0}/linux-x64-57_binding.node'.format(
                          os.environ['local_repository_packages_repo']))
                     sudo('echo "export PATH=$PATH:/opt/node" >> /etc/profile')
                     sudo('source /etc/profile')
                     sudo('./deps/npm/bin/npm-cli.js config set strict-ssl false')
                     sudo('./deps/npm/bin/npm-cli.js config set sass_binary_path /opt/node/linux-x64-57_binding.node')
-                    sudo('./deps/npm/bin/npm-cli.js config set registry https://{0}/{1}/{2}/'.format(
-                         os.environ['local_repository_host'], os.environ['local_repository_prefix'],
+                    sudo('./deps/npm/bin/npm-cli.js config set registry {0}/'.format(
                          os.environ['local_repository_npm_repo']))
                     sudo('./deps/npm/bin/npm-cli.js install npm')
                     sudo('cp deps/npm/bin/npm /opt/node/')
                     sudo('npm config set strict-ssl false')
-                    sudo('npm config set registry https://{0}/{1}/{2}/'.format(
-                         os.environ['local_repository_host'], os.environ['local_repository_prefix'],
+                    sudo('npm config set registry {0}/'.format(
                          os.environ['local_repository_npm_repo']))
                     sudo('npm config set sass_binary_path /opt/node/linux-x64-57_binding.node')
             else:
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index 631d3d6..80df48c 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -94,11 +94,10 @@ def id_generator(size=10, chars=string.digits + string.ascii_letters):
     return ''.join(random.choice(chars) for _ in range(size))
 
 
-def ensure_dataengine_tensorflow_jars(jars_dir, repository_host='', repository_prefix='', repository_packages_repo=''):
-    if repository_host != '':
-        local('wget https://{0}/{2}/{3}/spark-tensorflow-connector-1.0.0-s_2.11.jar -O '
-              '{1}spark-tensorflow-connector-1.0.0-s_2.11.jar'.format(repository_host, jars_dir, repository_prefix,
-                                                                      repository_packages_repo))
+def ensure_dataengine_tensorflow_jars(jars_dir, local_repository_enabled='', repository_packages_repo=''):
+    if local_repository_enabled == 'True':
+        local('wget {0}/spark-tensorflow-connector-1.0.0-s_2.11.jar -O '
+              '{1}spark-tensorflow-connector-1.0.0-s_2.11.jar'.format(repository_packages_repo, jars_dir))
     else:
         local('wget https://dl.bintray.com/spark-packages/maven/tapanalyticstoolkit/spark-tensorflow-connector/'
               '1.0.0-s_2.11/spark-tensorflow-connector-1.0.0-s_2.11.jar -O '
@@ -150,7 +149,7 @@ def configure_jupyter(os_user, jupyter_conf_file, templates_dir, jupyter_version
     if not exists('/home/' + os_user + '/.ensure_dir/jupyter_ensured'):
         try:
             sudo('pip2 install notebook=={} --no-cache-dir'.format(jupyter_version))
-            if 'local_repository_host' in os.environ:
+            if os.environ['local_repository_enabled'] == 'True':
                 sudo('pip2 install jupyter-console=={} --no-cache-dir'.format(
                      os.environ['notebook_jupyter_console_version']))
             sudo('pip2 install jupyter --no-cache-dir')
@@ -299,10 +298,8 @@ def install_r_pkg(requisites):
     status = list()
     error_parser = "ERROR:|error:|Cannot|failed|Please run|requires"
     try:
-        if 'local_repository_host' in os.environ:
-            r_repo = 'https://{}/{}/{}'.format(os.environ['local_repository_host'],
-                                                   os.environ['local_repository_prefix'],
-                                                   os.environ['local_repository_r_repo'])
+        if os.environ['local_repository_enabled'] == 'True':
+            r_repo = os.environ['local_repository_r_repo']
         else:
             r_repo = 'http://cran.us.r-project.org'
         for r_pkg in requisites:
@@ -395,10 +392,9 @@ def install_java_pkg(requisites):
 def get_available_r_pkgs():
     try:
         r_pkgs = dict()
-        if 'local_repository_host' in os.environ:
-            sudo('R -e \'write.table(available.packages(contriburl="https://{0}/{1}/{2}/src/contrib"), '
+        if os.environ['local_repository_enabled'] == 'True':
+            sudo('R -e \'write.table(available.packages(contriburl="{0}/src/contrib"), '
                  'file="/tmp/r.csv", row.names=F, col.names=F, sep=",")\''.format(
-                os.environ['local_repository_host'], os.environ['local_repository_prefix'],
                 os.environ['local_repository_r_repo']))
         else:
             sudo(
@@ -470,7 +466,7 @@ def install_ungit(os_user, notebook_name):
             sudo('systemctl restart ungit.service')
         except:
             sys.exit(1)
-    if 'local_repository_host' not in os.environ:
+    if os.environ['local_repository_enabled'] == 'False':
         run('git config --global http.proxy $http_proxy')
         run('git config --global https.proxy $https_proxy')
 
@@ -490,10 +486,8 @@ def set_mongo_parameters(client, mongo_parameters):
 
 def install_r_packages(os_user):
     if not exists('/home/' + os_user + '/.ensure_dir/r_packages_ensured'):
-        if 'local_repository_host' in os.environ:
-            r_repository = 'https://{0}/{1}/{2}/'.format(os.environ['local_repository_host'],
-                                                         os.environ['local_repository_prefix'],
-                                                         os.environ['local_repository_r_repo'])
+        if os.environ['local_repository_enabled'] == 'True':
+            r_repository = os.environ['local_repository_r_repo']
         else:
             r_repository = 'http://cran.us.r-project.org'
         sudo('R -e "install.packages(\'devtools\', repos = \'{0}\')"'.format(r_repository))
@@ -510,40 +504,27 @@ def add_breeze_library_local(os_user):
             breeze_tmp_dir = '/tmp/breeze_tmp_local/'
             jars_dir = '/opt/jars/'
             sudo('mkdir -p {}'.format(breeze_tmp_dir))
-            if 'local_repository_host' in os.environ:
-                sudo('wget https://{3}/{4}/{5}/breeze_{0}-{1}.jar -O {2}breeze_{0}-{1}.jar'.format(
-                     '2.11', '0.12', breeze_tmp_dir, os.environ['local_repository_host'],
-                     os.environ['local_repository_prefix'], os.environ['local_repository_packages_repo']))
-                sudo('wget https://{3}/{4}/{5}/breeze-natives_{0}-{1}.jar -O '
+            if os.environ['local_repository_enabled'] == 'True':
+                sudo('wget {3}/breeze_{0}-{1}.jar -O {2}breeze_{0}-{1}.jar'.format(
+                     '2.11', '0.12', breeze_tmp_dir, os.environ['local_repository_packages_repo']))
+                sudo('wget {3}/breeze-natives_{0}-{1}.jar -O '
                      '{2}breeze-natives_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir,
-                                                            os.environ['local_repository_host'],
-                                                            os.environ['local_repository_prefix'],
                                                             os.environ['local_repository_packages_repo']))
-                sudo('wget https://{3}/{4}/{5}/breeze-viz_{0}-{1}.jar -O '
+                sudo('wget {3}/breeze-viz_{0}-{1}.jar -O '
                      '{2}breeze-viz_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir,
-                                                        os.environ['local_repository_host'],
-                                                        os.environ['local_repository_prefix'],
                                                         os.environ['local_repository_packages_repo']))
-                sudo('wget https://{3}/{4}/{5}/breeze-macros_{0}-{1}.jar -O '
+                sudo('wget {3}/breeze-macros_{0}-{1}.jar -O '
                      '{2}breeze-macros_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir,
-                                                           os.environ['local_repository_host'],
-                                                           os.environ['local_repository_prefix'],
                                                            os.environ['local_repository_packages_repo']))
-                sudo('wget https://{3}/{4}/{5}/breeze-parent_{0}-{1}.jar -O '
+                sudo('wget {3}/breeze-parent_{0}-{1}.jar -O '
                      '{2}breeze-parent_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir,
-                                                           os.environ['local_repository_host'],
-                                                           os.environ['local_repository_prefix'],
                                                            os.environ['local_repository_packages_repo']))
-                sudo('wget https://{2}/{3}/{4}/jfreechart-{0}.jar -O {1}jfreechart-{0}.jar'.format(
-                     '1.0.19', breeze_tmp_dir, os.environ['local_repository_host'],
-                     os.environ['local_repository_prefix'], os.environ['local_repository_packages_repo']))
-                sudo('wget https://{2}/{3}/{4}/jcommon-{0}.jar -O {1}jcommon-{0}.jar'.format(
-                     '1.0.24', breeze_tmp_dir, os.environ['local_repository_host'],
-                     os.environ['local_repository_prefix'], os.environ['local_repository_packages_repo']))
-                sudo('wget https://{2}/{3}/{4}/spark-kernel-brunel-all-{0}.jar -O '
+                sudo('wget {2}/jfreechart-{0}.jar -O {1}jfreechart-{0}.jar'.format(
+                     '1.0.19', breeze_tmp_dir, os.environ['local_repository_packages_repo']))
+                sudo('wget {2}/jcommon-{0}.jar -O {1}jcommon-{0}.jar'.format(
+                     '1.0.24', breeze_tmp_dir, os.environ['local_repository_packages_repo']))
+                sudo('wget {2}/spark-kernel-brunel-all-{0}.jar -O '
                      '{1}spark-kernel-brunel-all-{0}.jar'.format('2.3', breeze_tmp_dir,
-                                                                 os.environ['local_repository_host'],
-                                                                 os.environ['local_repository_prefix'],
                                                                  os.environ['local_repository_packages_repo']))
             else:
                 sudo('wget http://central.maven.org/maven2/org/scalanlp/breeze_{0}/{1}/breeze_{0}-{1}.jar -O '
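
The enabled-flag check above is repeated in configure_jupyter(), install_r_pkg(), get_available_r_pkgs(), install_ungit(), install_r_packages() and add_breeze_library_local(). A minimal sketch of how such a lookup could be shared is shown below; the repo_url() helper is hypothetical, not part of this commit, and it assumes the local_repository_* values from dlab.ini are exported into the environment as full URLs.

    import os

    def repo_url(local_env_key, public_default):
        # Use the URL from the local repository only when the feature is
        # explicitly enabled; otherwise fall back to the public mirror.
        if os.environ.get('local_repository_enabled', 'False') == 'True':
            return os.environ[local_env_key]
        return public_default

    # Mirrors the branches in install_r_packages() and add_breeze_library_local():
    r_repository = repo_url('local_repository_r_repo', 'http://cran.us.r-project.org')
    jars_repository = repo_url('local_repository_packages_repo',
                               'http://central.maven.org/maven2')
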
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/edge_prepare.py
index e84929c..f27c747 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_prepare.py
@@ -297,7 +297,7 @@ if __name__ == "__main__":
                 "ToPort": 389, "IpProtocol": "-1", "UserIdGroupPairs": []
             }
         ])
-        if 'local_repository_host' in os.environ:
+        if os.environ['local_repository_enabled'] == 'True':
             edge_sg_egress.append(
                 {
                     "PrefixListIds": [],
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
index fce4f18..21361f4 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_dataengine-service_create_configs.py
@@ -48,9 +48,8 @@ parser.add_argument('--pip_mirror', type=str, default='')
 parser.add_argument('--numpy_version', type=str, default='')
 parser.add_argument('--application', type=str, default='')
 parser.add_argument('--r_enabled', type=str, default='')
-parser.add_argument('--local_repository_host', type=str, default='')
+parser.add_argument('--local_repository_enabled', type=str, default='')
 parser.add_argument('--local_repository_packages_repo', type=str, default='')
-parser.add_argument('--local_repository_prefix', type=str, default='')
 args = parser.parse_args()
 
 emr_dir = '/opt/' + args.emr_version + '/jars/'
@@ -140,36 +139,27 @@ def add_breeze_library_emr(args):
     breeze_tmp_dir = '/tmp/breeze_tmp_emr/'
     local('sudo mkdir -p ' + new_jars_directory_path)
     local('mkdir -p ' + breeze_tmp_dir)
-    if args.local_repository_host != '':
-        local('wget https://{3}/{5}/{4}/breeze_{0}-{1}.jar -O {2}breeze_{0}-{1}.jar'.format(
-              '2.11', '0.12', breeze_tmp_dir, args.local_repository_host, args.local_repository_packages_repo,
-              args.local_repository_prefix))
-        local('wget https://{3}/{5}/{4}/breeze-natives_{0}-{1}.jar -O '
+    if args.local_repository_enabled == 'True':
+        local('wget {3}/breeze_{0}-{1}.jar -O {2}breeze_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir,
+                                                                            args.local_repository_packages_repo))
+        local('wget {3}/breeze-natives_{0}-{1}.jar -O '
               '{2}breeze-natives_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir,
-                                                     args.local_repository_host, args.local_repository_packages_repo,
-                                                     args.local_repository_prefix))
-        local('wget https://{3}/{5}/{4}/breeze-viz_{0}-{1}.jar -O '
+                                                     args.local_repository_packages_repo))
+        local('wget {3}/breeze-viz_{0}-{1}.jar -O '
               '{2}breeze-viz_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir,
-                                                 args.local_repository_host, args.local_repository_packages_repo,
-                                                 args.local_repository_prefix))
-        local('wget https://{3}/{5}/{4}/breeze-macros_{0}-{1}.jar -O '
+                                                 args.local_repository_packages_repo))
+        local('wget {3}/breeze-macros_{0}-{1}.jar -O '
               '{2}breeze-macros_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir,
-                                                    args.local_repository_host, args.local_repository_packages_repo,
-                                                    args.local_repository_prefix))
-        local('wget https://{3}/{5}/{4}/breeze-parent_{0}-{1}.jar -O '
+                                                    args.local_repository_packages_repo))
+        local('wget {3}/breeze-parent_{0}-{1}.jar -O '
               '{2}breeze-parent_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir,
-                                                    args.local_repository_host, args.local_repository_packages_repo,
-                                                    args.local_repository_prefix))
-        local('wget https://{2}/{4}/{3}/jfreechart-{0}.jar -O {1}jfreechart-{0}.jar'.format(
-              '1.0.19', breeze_tmp_dir, args.local_repository_host, args.local_repository_packages_repo,
-              args.local_repository_prefix))
-        local('wget https://{2}/{4}/{3}/jcommon-{0}.jar -O {1}jcommon-{0}.jar'.format(
-              '1.0.24', breeze_tmp_dir, args.local_repository_host, args.local_repository_packages_repo,
-              args.local_repository_prefix))
-        local('wget https://{2}/{4}/{3}/spark-kernel-brunel-all-{0}.jar -O '
-              '{1}spark-kernel-brunel-all-{0}.jar'.format('2.3', breeze_tmp_dir, args.local_repository_host,
-                                                          args.local_repository_packages_repo,
-                                                          args.local_repository_prefix))
+                                                    args.local_repository_packages_repo))
+        local('wget {2}/jfreechart-{0}.jar -O {1}jfreechart-{0}.jar'.format(
+              '1.0.19', breeze_tmp_dir, args.local_repository_packages_repo))
+        local('wget {2}/jcommon-{0}.jar -O {1}jcommon-{0}.jar'.format(
+              '1.0.24', breeze_tmp_dir, args.local_repository_packages_repo))
+        local('wget {2}/spark-kernel-brunel-all-{0}.jar -O '
+              '{1}spark-kernel-brunel-all-{0}.jar'.format('2.3', breeze_tmp_dir, args.local_repository_packages_repo))
     else:
         local('wget http://central.maven.org/maven2/org/scalanlp/breeze_{0}/{1}/breeze_{0}-{1}.jar -O '
               '{2}breeze_{0}-{1}.jar'.format('2.11', '0.12', breeze_tmp_dir))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
index 1a32ac5..bf0c42b 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_install_dataengine-service_kernels.py
@@ -76,19 +76,14 @@ if __name__ == "__main__":
     hadoop_version = get_hadoop_version(args.cluster_name)
     r_enabled = os.environ['notebook_r_enabled']
     numpy_version = os.environ['notebook_numpy_version']
-    if 'local_repository_host' in os.environ:
-        repository_host = os.environ['local_repository_host']
+    if os.environ['local_repository_enabled'] == 'True':
         packages_repo = os.environ['local_repository_packages_repo']
-        repository_prefix = os.environ['local_repository_prefix']
     else:
-        repository_host = ''
         packages_repo = ''
-        repository_prefix = ''
     sudo("/usr/bin/python /usr/local/bin/jupyter_dataengine-service_create_configs.py --bucket " + args.bucket +
          " --cluster_name " + args.cluster_name + " --emr_version " + args.emr_version + " --spark_version " +
          spark_version + " --hadoop_version " + hadoop_version + " --region " + args.region + " --excluded_lines '"
          + args.emr_excluded_spark_properties + "' --user_name " + args.edge_user_name + " --os_user " + args.os_user +
          " --pip_mirror " + args.pip_mirror + " --numpy_version " + numpy_version + " --application " +
-         args.application + " --r_enabled " + r_enabled + " --local_repository_host '" +
-         repository_host + "' --local_repository_packages_repo '" + packages_repo + "' --local_repository_prefix '" +
-         repository_prefix + "' ")
+         args.application + " --r_enabled " + r_enabled + " --local_repository_enabled " +
+         os.environ['local_repository_enabled'] + " --local_repository_packages_repo '" + packages_repo + "' ")
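
For reference, a minimal sketch of the flag hand-off performed above, assuming only the two environment variables used by this change; the build_repo_args() helper is illustrative and not part of the script.

    import os

    def build_repo_args():
        # The enabled flag is always forwarded; the packages repository is
        # quoted because it collapses to an empty string when disabled.
        enabled = os.environ.get('local_repository_enabled', 'False')
        if enabled == 'True':
            packages_repo = os.environ.get('local_repository_packages_repo', '')
        else:
            packages_repo = ''
        return ("--local_repository_enabled {0} "
                "--local_repository_packages_repo '{1}'").format(enabled, packages_repo)

    print(build_repo_args())
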
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_create_endpoint.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_create_endpoint.py
index e64f179..831ce0c 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_create_endpoint.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_create_endpoint.py
@@ -85,7 +85,7 @@ if __name__ == "__main__":
                     endpoint = endpoint_id
             print("S3 ENDPOINT: {}".format(endpoint))
 
-            if 'local_repository_host' in os.environ and args.duo_vpc_enable == 'false':
+            if os.environ['local_repository_enabled'] == 'True' and args.duo_vpc_enable == 'false':
                 # Creating Security Group and EC2 endpoint
                 sg_tag = {"Key": args.infra_tag_value, "Value": args.infra_tag_name}
                 allowed_vpc_cidr_ip_ranges = list()
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_dataengine-service_create_configs.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_dataengine-service_create_configs.py
index 1393c9f..c722cbe 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_dataengine-service_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_dataengine-service_create_configs.py
@@ -53,9 +53,8 @@ parser.add_argument('--pip_mirror', type=str, default='')
 parser.add_argument('--numpy_version', type=str, default='')
 parser.add_argument('--application', type=str, default='')
 parser.add_argument('--r_enabled', type=str, default='')
-parser.add_argument('--local_repository_host', type=str, default='')
+parser.add_argument('--local_repository_enabled', type=str, default='')
 parser.add_argument('--local_repository_packages_repo', type=str, default='')
-parser.add_argument('--local_repository_prefix', type=str, default='')
 args = parser.parse_args()
 
 emr_dir = '/opt/' + args.emr_version + '/jars/'
@@ -73,12 +72,10 @@ else:
 def install_remote_livy(args):
     local('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/')
     local('sudo service zeppelin-notebook stop')
-    if args.local_repository_host != '':
-        local('sudo wget -i https://{1}/{4}/{5}/livy-server-{0}.zip -O '
-              '/opt/{2}/{3}/livy-server-{0}.zip'.format(args.livy_version, args.local_repository_host,
-                                                        args.emr_version, args.cluster_name,
-                                                        args.local_repository_prefix,
-                                                        args.local_repository_packages_repo))
+    if args.local_repository_enabled == 'True':
+        local('sudo -i wget {1}/livy-server-{0}.zip -O '
+              '/opt/{2}/{3}/livy-server-{0}.zip'.format(args.livy_version, args.local_repository_packages_repo,
+                                                        args.emr_version, args.cluster_name))
     else:
         local('sudo -i wget http://archive.cloudera.com/beta/livy/livy-server-{0}.zip -O '
               '/opt/{1}/{2}/livy-server-{0}.zip'.format(args.livy_version, args.emr_version, args.cluster_name))
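
The same selection drives the Livy download in install_remote_livy(); a compact sketch of the URL choice is below, assuming the parsed flags shown above (the livy_url() helper itself is hypothetical, and the usage line carries placeholder values).

    def livy_url(local_repository_enabled, local_repository_packages_repo, livy_version):
        # Local mirror when the flag is set, public Cloudera archive otherwise.
        if local_repository_enabled == 'True':
            return '{0}/livy-server-{1}.zip'.format(local_repository_packages_repo,
                                                    livy_version)
        return 'http://archive.cloudera.com/beta/livy/livy-server-{0}.zip'.format(
            livy_version)

    print(livy_url('False', '', '0.3.0'))
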
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py
index bcbc60b..e9eacf3 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_install_dataengine-service_kernels.py
@@ -74,14 +74,10 @@ if __name__ == "__main__":
     livy_version = os.environ['notebook_livy_version']
     r_enabled = os.environ['notebook_r_enabled']
     numpy_version = os.environ['notebook_numpy_version']
-    if 'local_repository_host' in os.environ:
-        repository_host = os.environ['local_repository_host']
+    if os.environ['local_repository_enabled'] == 'True':
         packages_repo = os.environ['local_repository_packages_repo']
-        repository_prefix = os.environ['local_repository_prefix']
     else:
-        repository_host = ''
         packages_repo = ''
-        repository_prefix = ''
     command = "/usr/bin/python /usr/local/bin/zeppelin_dataengine-service_create_configs.py " \
               "--bucket {0} " \
               "--cluster_name {1} " \
@@ -101,9 +97,8 @@ if __name__ == "__main__":
               "--numpy_version {15} " \
               "--application {16} " \
               "--r_enabled {17} " \
-              "--local_repository_host '{18}' " \
+              "--local_repository_enabled '{18}' " \
               "--local_repository_packages_repo '{19}' " \
-              "--local_repository_prefix '{20}' " \
         .format(args.bucket,
                 args.cluster_name,
                 args.emr_version,
@@ -122,7 +117,6 @@ if __name__ == "__main__":
                 numpy_version,
                 args.application,
                 r_enabled,
-                repository_host,
-                packages_repo,
-                repository_prefix)
+                os.environ['local_repository_enabled'],
+                packages_repo)
     sudo(command)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
index 2277af4..157a232 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
@@ -38,7 +38,7 @@ def general_clean():
         sudo('npm -g uninstall ungit')
         sudo('rm -f /etc/systemd/system/ungit.service')
         sudo('systemctl daemon-reload')
-        if 'local_repository_host' in os.environ:
+        if os.environ['local_repository_enabled'] == 'True':
             sudo('rm -rf /opt/node/')
         else:
             remove_os_pkg(['nodejs', 'npm'])
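
Note that os.environ['local_repository_enabled'] raises KeyError when the variable is absent; a defensive reading of the flag (an assumption about deployments where the variable may be unset, not part of this commit) would be:

    import os

    # Treat a missing variable the same as a disabled local repository.
    local_repo_enabled = os.environ.get('local_repository_enabled', 'False') == 'True'
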
diff --git a/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py
index e82f3ce..4698dc7 100644
--- a/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/deeplearning_dataengine_create_configs.py
@@ -41,9 +41,8 @@ parser.add_argument('--spark_master', type=str, default='')
 parser.add_argument('--region', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='')
 parser.add_argument('--spark_configurations', type=str, default='')
-parser.add_argument('--local_repository_host', type=str, default='')
+parser.add_argument('--local_repository_enabled', type=str, default='')
 parser.add_argument('--local_repository_packages_repo', type=str, default='')
-parser.add_argument('--local_repository_prefix', type=str, default='')
 args = parser.parse_args()
 
 kernels_dir = '/home/' + args.os_user + '/.local/share/jupyter/kernels/'
@@ -52,15 +51,13 @@ local_jars_dir = '/opt/jars/'
 
 spark_version = args.spark_version
 hadoop_version = args.hadoop_version
-if args.local_repository_host != '':
-    scala_link = 'https://{0}/{1}/{2}/'.format(args.local_repository_host, args.local_repository_prefix,
-                                               args.local_repository_packages_repo)
+if args.local_repository_enabled == 'True':
+    scala_link = '{0}/'.format(args.local_repository_packages_repo)
 else:
     scala_link = "http://www.scala-lang.org/files/archive/"
-if args.local_repository_host != '':
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        args.local_repository_host, spark_version, hadoop_version, args.local_repository_prefix,
-        args.local_repository_packages_repo)
+if args.local_repository_enabled == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        args.local_repository_packages_repo, spark_version, hadoop_version)
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-{0}/spark-{0}-bin-hadoop{1}.tgz".format(spark_version,
                                                                                                       hadoop_version)
@@ -116,7 +113,7 @@ if __name__ == "__main__":
         dataengine_dir_prepare('/opt/{}/'.format(args.cluster_name))
         install_dataengine_spark(args.cluster_name, spark_link, spark_version, hadoop_version, cluster_dir, args.os_user,
                                  args.datalake_enabled)
-        ensure_dataengine_tensorflow_jars(local_jars_dir, args.local_repository_host, args.local_repository_prefix,
+        ensure_dataengine_tensorflow_jars(local_jars_dir, args.local_repository_enabled,
                                           args.local_repository_packages_repo)
         configure_dataengine_spark(args.cluster_name, local_jars_dir, cluster_dir, args.datalake_enabled,
                                    args.spark_configurations)
diff --git a/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
index 8972ad9..686de13 100644
--- a/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
@@ -69,21 +69,17 @@ if __name__ == "__main__":
         region = os.environ['aws_region']
     except:
         region = ''
-    if 'local_repository_host' in os.environ:
-        repository_host = os.environ['local_repository_host']
+    if os.environ['local_repository_enabled'] == 'True':
         packages_repo = os.environ['local_repository_packages_repo']
-        repository_prefix = os.environ['local_repository_prefix']
     else:
-        repository_host = ''
         packages_repo = ''
-        repository_prefix = ''
     configure_notebook(args.keyfile, env.host_string)
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
     sudo('/usr/bin/python /usr/local/bin/deeplearning_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
-         '--datalake_enabled {} --spark_configurations "{}" --local_repository_host "{}" '
-         '--local_repository_packages_repo "{}" --local_repository_prefix "{}"'.format(
+         '--datalake_enabled {} --spark_configurations "{}" --local_repository_enabled {} '
+         '--local_repository_packages_repo "{}" '.format(
           args.cluster_name, args.spark_version, args.hadoop_version, args.os_user,  args.spark_master,
-          region, args.datalake_enabled, os.environ['spark_configurations'], repository_host, packages_repo,
-          repository_prefix))
+          region, args.datalake_enabled, os.environ['spark_configurations'], os.environ['local_repository_enabled'],
+          packages_repo))
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
index 1d41ab0..bc00111 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_dataengine_create_configs.py
@@ -42,9 +42,8 @@ parser.add_argument('--region', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='')
 parser.add_argument('--spark_configurations', type=str, default='')
 parser.add_argument('--r_enabled', type=str, default='')
-parser.add_argument('--local_repository_host', type=str, default='')
+parser.add_argument('--local_repository_enabled', type=str, default='')
 parser.add_argument('--local_repository_packages_repo', type=str, default='')
-parser.add_argument('--local_repository_prefix', type=str, default='')
 args = parser.parse_args()
 
 kernels_dir = '/home/' + args.os_user + '/.local/share/jupyter/kernels/'
@@ -53,15 +52,13 @@ local_jars_dir = '/opt/jars/'
 
 spark_version = args.spark_version
 hadoop_version = args.hadoop_version
-if args.local_repository_host != '':
-    scala_link = 'https://{0}/{1}/{2}/'.format(args.local_repository_host, args.local_repository_prefix,
-                                               args.local_repository_packages_repo)
+if args.local_repository_enabled == 'True':
+    scala_link = '{0}/'.format(args.local_repository_packages_repo)
 else:
     scala_link = "http://www.scala-lang.org/files/archive/"
-if args.local_repository_host != '':
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        args.local_repository_host, spark_version, hadoop_version, args.local_repository_prefix,
-        args.local_repository_packages_repo)
+if args.local_repository_enabled == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        args.local_repository_packages_repo, spark_version, hadoop_version)
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-{0}/spark-{0}-bin-hadoop{1}.tgz".format(spark_version,
                                                                                                       hadoop_version)
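
The scala_link and spark_link selections above test the same flag twice; an equivalent single branch is sketched below (illustrative only, with placeholder values in the usage line).

    def artifact_links(local_repository_enabled, packages_repo, spark_version, hadoop_version):
        # Returns (scala_link, spark_link) under the same flag as above.
        if local_repository_enabled == 'True':
            return ('{0}/'.format(packages_repo),
                    '{0}/spark-{1}-bin-hadoop{2}.tgz'.format(packages_repo, spark_version,
                                                             hadoop_version))
        return ('http://www.scala-lang.org/files/archive/',
                'https://archive.apache.org/dist/spark/spark-{0}/'
                'spark-{0}-bin-hadoop{1}.tgz'.format(spark_version, hadoop_version))

    print(artifact_links('False', '', '2.3.0', '2.7'))
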
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
index 90ed00b..fdbcff8 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
@@ -73,23 +73,18 @@ if __name__ == "__main__":
     except:
         region = ''
     r_enabled = os.environ['notebook_r_enabled']
-    if 'local_repository_host' in os.environ:
-        repository_host = os.environ['local_repository_host']
+    if os.environ['local_repository_enabled'] == 'True':
         packages_repo = os.environ['local_repository_packages_repo']
-        repository_prefix = os.environ['local_repository_prefix']
     else:
-        repository_host = ''
         packages_repo = ''
-        repository_prefix = ''
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
     configure_notebook(args.keyfile, env.host_string)
     sudo('/usr/bin/python /usr/local/bin/jupyter_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} \
          --spark_master {} --region {} --datalake_enabled {} --r_enabled {} --spark_configurations "{}" '
-         '--local_repository_host "{}" --local_repository_packages_repo "{}" --local_repository_prefix "{}" '.
+         '--local_repository_enabled {} --local_repository_packages_repo "{}" '.
          format(args.cluster_name, args.spark_version, args.hadoop_version, args.os_user, args.spark_master,
-                region, args.datalake_enabled, r_enabled, os.environ['spark_configurations'], repository_host,
-                packages_repo, repository_prefix
-                ))
+                region, args.datalake_enabled, r_enabled, os.environ['spark_configurations'],
+                os.environ['local_repository_enabled'], packages_repo))
 
diff --git a/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
index 8459a22..2baaa6e 100644
--- a/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/rstudio_dataengine_create_configs.py
@@ -41,19 +41,17 @@ parser.add_argument('--spark_master', type=str, default='')
 parser.add_argument('--region', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='')
 parser.add_argument('--spark_configurations', type=str, default='')
-parser.add_argument('--local_repository_host', type=str, default='')
+parser.add_argument('--local_repository_enabled', type=str, default='')
 parser.add_argument('--local_repository_packages_repo', type=str, default='')
-parser.add_argument('--local_repository_prefix', type=str, default='')
 args = parser.parse_args()
 
 cluster_dir = '/opt/' + args.cluster_name + '/'
 local_jars_dir = '/opt/jars/'
 spark_version = args.spark_version
 hadoop_version = args.hadoop_version
-if args.local_repository_host != '':
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        args.local_repository_host, spark_version, hadoop_version, args.local_repository_prefix,
-        args.local_repository_packages_repo)
+if args.local_repository_enabled == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        args.local_repository_packages_repo, spark_version, hadoop_version)
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-{0}/spark-{0}-bin-hadoop{1}.tgz".format(spark_version,
                                                                                                       hadoop_version)
diff --git a/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
index bc73388..d257736 100644
--- a/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
@@ -67,19 +67,15 @@ if __name__ == "__main__":
         region = ''
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
-    if 'local_repository_host' in os.environ:
-        repository_host = os.environ['local_repository_host']
+    if os.environ['local_repository_enabled'] == 'True':
         packages_repo = os.environ['local_repository_packages_repo']
-        repository_prefix = os.environ['local_repository_prefix']
     else:
-        repository_host = ''
         packages_repo = ''
-        repository_prefix = ''
     configure_notebook(args.keyfile, env.host_string)
     sudo('/usr/bin/python /usr/local/bin/rstudio_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
-         '--datalake_enabled {} --spark_configurations "{}" --local_repository_host "{}" '
-         '--local_repository_packages_repo "{}" --local_repository_prefix "{}" '.
+         '--datalake_enabled {} --spark_configurations "{}" --local_repository_enabled {} '
+         '--local_repository_packages_repo "{}" '.
          format(args.cluster_name, args.spark_version, args.hadoop_version, args.os_user, args.spark_master, region,
-                args.datalake_enabled, os.environ['spark_configurations'], repository_host, packages_repo,
-                repository_prefix))
+                args.datalake_enabled, os.environ['spark_configurations'], os.environ['local_repository_enabled'],
+                packages_repo))
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
index 566a8a4..53e0234 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_dataengine_create_configs.py
@@ -41,19 +41,17 @@ parser.add_argument('--spark_master', type=str, default='')
 parser.add_argument('--region', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='')
 parser.add_argument('--spark_configurations', type=str, default='')
-parser.add_argument('--local_repository_host', type=str, default='')
+parser.add_argument('--local_repository_enabled', type=str, default='')
 parser.add_argument('--local_repository_packages_repo', type=str, default='')
-parser.add_argument('--local_repository_prefix', type=str, default='')
 args = parser.parse_args()
 
 cluster_dir = '/opt/' + args.cluster_name + '/'
 local_jars_dir = '/opt/jars/'
 spark_version = args.spark_version
 hadoop_version = args.hadoop_version
-if args.local_repository_host != '':
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        args.local_repository_host, spark_version, hadoop_version, args.local_repository_prefix,
-        args.local_repository_packages_repo)
+if args.local_repository_enabled == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        args.local_repository_packages_repo, spark_version, hadoop_version)
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-{0}/spark-{0}-bin-hadoop{1}.tgz".format(spark_version,
                                                                                                       hadoop_version)
@@ -95,7 +93,7 @@ if __name__ == "__main__":
     dataengine_dir_prepare('/opt/{}/'.format(args.cluster_name))
     install_dataengine_spark(args.cluster_name, spark_link, spark_version, hadoop_version, cluster_dir, args.os_user,
                              args.datalake_enabled)
-    ensure_dataengine_tensorflow_jars(local_jars_dir, args.local_repository_host, args.local_repository_prefix,
+    ensure_dataengine_tensorflow_jars(local_jars_dir, args.local_repository_enabled,
                                       args.local_repository_packages_repo)
     configure_dataengine_spark(args.cluster_name, local_jars_dir, cluster_dir, args.datalake_enabled,
                                args.spark_configurations)
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
index 2adf03a..052d756 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
@@ -67,19 +67,15 @@ if __name__ == "__main__":
         region = ''
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
-    if 'local_repository_host' in os.environ:
-        repository_host = os.environ['local_repository_host']
+    if os.environ['local_repository_enabled'] == 'True':
         packages_repo = os.environ['local_repository_packages_repo']
-        repository_prefix = os.environ['local_repository_prefix']
     else:
-        repository_host = ''
         packages_repo = ''
-        repository_prefix = ''
     configure_notebook(args.keyfile, env.host_string)
     sudo('/usr/bin/python /usr/local/bin/tensor-rstudio_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
-         '--datalake_enabled {} --spark_configurations "{}" --local_repository_host "{}" '
-         '--local_repository_packages_repo "{}" --local_repository_prefix "{}"'.
+         '--datalake_enabled {} --spark_configurations "{}" --local_repository_enabled {} '
+         '--local_repository_packages_repo "{}" '.
          format(args.cluster_name, args.spark_version, args.hadoop_version, args.os_user, args.spark_master, region,
-                args.datalake_enabled, os.environ['spark_configurations'], repository_host, packages_repo,
-                repository_prefix))
+                args.datalake_enabled, os.environ['spark_configurations'], os.environ['local_repository_enabled'],
+                packages_repo))
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py
index e82f3ce..4698dc7 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor_dataengine_create_configs.py
@@ -41,9 +41,8 @@ parser.add_argument('--spark_master', type=str, default='')
 parser.add_argument('--region', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='')
 parser.add_argument('--spark_configurations', type=str, default='')
-parser.add_argument('--local_repository_host', type=str, default='')
+parser.add_argument('--local_repository_enabled', type=str, default='')
 parser.add_argument('--local_repository_packages_repo', type=str, default='')
-parser.add_argument('--local_repository_prefix', type=str, default='')
 args = parser.parse_args()
 
 kernels_dir = '/home/' + args.os_user + '/.local/share/jupyter/kernels/'
@@ -52,15 +51,13 @@ local_jars_dir = '/opt/jars/'
 
 spark_version = args.spark_version
 hadoop_version = args.hadoop_version
-if args.local_repository_host != '':
-    scala_link = 'https://{0}/{1}/{2}/'.format(args.local_repository_host, args.local_repository_prefix,
-                                               args.local_repository_packages_repo)
+if args.local_repository_enabled == 'True':
+    scala_link = '{0}/'.format(args.local_repository_packages_repo)
 else:
     scala_link = "http://www.scala-lang.org/files/archive/"
-if args.local_repository_host != '':
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        args.local_repository_host, spark_version, hadoop_version, args.local_repository_prefix,
-        args.local_repository_packages_repo)
+if args.local_repository_enabled == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        args.local_repository_packages_repo, spark_version, hadoop_version)
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-{0}/spark-{0}-bin-hadoop{1}.tgz".format(spark_version,
                                                                                                       hadoop_version)
@@ -116,7 +113,7 @@ if __name__ == "__main__":
         dataengine_dir_prepare('/opt/{}/'.format(args.cluster_name))
         install_dataengine_spark(args.cluster_name, spark_link, spark_version, hadoop_version, cluster_dir, args.os_user,
                                  args.datalake_enabled)
-        ensure_dataengine_tensorflow_jars(local_jars_dir, args.local_repository_host, args.local_repository_prefix,
+        ensure_dataengine_tensorflow_jars(local_jars_dir, args.local_repository_enabled,
                                           args.local_repository_packages_repo)
         configure_dataengine_spark(args.cluster_name, local_jars_dir, cluster_dir, args.datalake_enabled,
                                    args.spark_configurations)
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
index 75e1ae3..9a3bd3f 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
@@ -68,19 +68,15 @@ if __name__ == "__main__":
         region = ''
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
-    if 'local_repository_host' in os.environ:
-        repository_host = os.environ['local_repository_host']
+    if os.environ['local_repository_enabled'] == 'True':
         packages_repo = os.environ['local_repository_packages_repo']
-        repository_prefix = os.environ['local_repository_prefix']
     else:
-        repository_host = ''
         packages_repo = ''
-        repository_prefix = ''
     configure_notebook(args.keyfile, env.host_string)
     sudo('/usr/bin/python /usr/local/bin/tensor_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
-         '--datalake_enabled {} --spark_configurations "{}" --local_repository_host "{}" '
-         '--local_repository_packages_repo "{}" --local_repository_prefix "{}" '.
+         '--datalake_enabled {} --spark_configurations "{}" --local_repository_enabled "{}" '
+         '--local_repository_packages_repo "{}" '.
          format(args.cluster_name, args.spark_version, args.hadoop_version, args.os_user, args.spark_master, region,
-                args.datalake_enabled, os.environ['spark_configurations'], repository_host, packages_repo,
-                repository_prefix))
+                args.datalake_enabled, os.environ['spark_configurations'], os.environ['local_repository_enabled'],
+                packages_repo))
diff --git a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
index 9a04a51..9b39c37 100644
--- a/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
+++ b/infrastructure-provisioning/src/general/scripts/os/zeppelin_dataengine_create_configs.py
@@ -46,19 +46,17 @@ parser.add_argument('--region', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='')
 parser.add_argument('--r_enabled', type=str, default='')
 parser.add_argument('--spark_configurations', type=str, default='')
-parser.add_argument('--local_repository_host', type=str, default='')
+parser.add_argument('--local_repository_enabled', type=str, default='')
 parser.add_argument('--local_repository_packages_repo', type=str, default='')
-parser.add_argument('--local_repository_prefix', type=str, default='')
 args = parser.parse_args()
 
 cluster_dir = '/opt/' + args.cluster_name + '/'
 local_jars_dir = '/opt/jars/'
 spark_version = args.spark_version
 hadoop_version = args.hadoop_version
-if args.local_repository_host != '':
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        args.local_repository_host, spark_version, hadoop_version, args.local_repository_prefix,
-        args.local_repository_packages_repo)
+if args.local_repository_enabled == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        args.local_repository_packages_repo, spark_version, hadoop_version)
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-{0}/spark-{0}-bin-hadoop{1}.tgz".format(spark_version,
                                                                                                       hadoop_version)
@@ -162,11 +160,10 @@ def configure_zeppelin_dataengine_interpreter(cluster_name, cluster_dir, os_user
 def install_remote_livy(args):
     local('sudo chown ' + args.os_user + ':' + args.os_user + ' -R /opt/zeppelin/')
     local('sudo service zeppelin-notebook stop')
-    if args.local_repository_host != '':
-        local('sudo wget -i https://{1}/{3}/{4}/livy-server-{0}.zip -O '
-              '/opt/{2}/livy-server-{0}.zip'.format(args.livy_version, args.local_repository_host,
-                                                    args.cluster_name, args.local_repository_prefix,
-                                                    args.local_repository_packages_repo))
+    if args.local_repository_enabled == 'True':
+        local('sudo -i wget {1}/livy-server-{0}.zip -O '
+              '/opt/{2}/livy-server-{0}.zip'.format(args.livy_version, args.local_repository_packages_repo,
+                                                    args.cluster_name))
     else:
         local('sudo -i wget http://archive.cloudera.com/beta/livy/livy-server-' + args.livy_version + '.zip -O /opt/' +
               args.cluster_name + '/livy-server-' + args.livy_version + '.zip')
diff --git a/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
index 505dd33..3b5196e 100644
--- a/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
@@ -73,23 +73,19 @@ if __name__ == "__main__":
         region = ''
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
-    if 'local_repository_host' in os.environ:
-        repository_host = os.environ['local_repository_host']
+    if os.environ['local_repository_enabled'] == 'True':
         packages_repo = os.environ['local_repository_packages_repo']
-        repository_prefix = os.environ['local_repository_prefix']
     else:
-        repository_host = ''
         packages_repo = ''
-        repository_prefix = ''
     configure_notebook(args.keyfile, env.host_string)
     livy_version = os.environ['notebook_livy_version']
     r_enabled = os.environ['notebook_r_enabled']
     sudo('/usr/bin/python /usr/local/bin/zeppelin_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --keyfile {} \
          --notebook_ip {} --livy_version {} --multiple_clusters {} --region {} --datalake_enabled {} '
-         '--r_enabled {} --spark_configurations "{}" --local_repository_host "{}" '
-         '--local_repository_packages_repo "{}" --local_repository_prefix "{}" '.
+         '--r_enabled {} --spark_configurations "{}" --local_repository_enabled {} '
+         '--local_repository_packages_repo "{}" '.
          format(args.cluster_name, args.spark_version, args.hadoop_version, args.os_user, args.spark_master,
                 args.keyfile, args.notebook_ip, livy_version, os.environ['notebook_multiple_clusters'], region,
-                args.datalake_enabled, r_enabled, os.environ['spark_configurations'], repository_host, packages_repo,
-                repository_prefix))
+                args.datalake_enabled, r_enabled, os.environ['spark_configurations'],
+                os.environ['local_repository_enabled'], packages_repo))
diff --git a/infrastructure-provisioning/src/general/templates/aws/Rprofile.site b/infrastructure-provisioning/src/general/templates/aws/Rprofile.site
index a8cdc61..d7dc159 100644
--- a/infrastructure-provisioning/src/general/templates/aws/Rprofile.site
+++ b/infrastructure-provisioning/src/general/templates/aws/Rprofile.site
@@ -1,5 +1,5 @@
 local({
   r <- getOption("repos")
-  r["CRAN"] <- "https://REPOSITORY_HOST/REPOSITORY_PREFIX/R_REPO/"
+  r["CRAN"] <- "R_REPO/"
   options(repos = r)
 })
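
With the template reduced to a single R_REPO placeholder, the substituted value must now be a complete URL (previously the host, prefix and repo name were composed inside the template). A hypothetical substitution step, assuming the template has been staged at /tmp/Rprofile.site before being copied into place; the actual replacement lives in the configure scripts and is not shown here.

    import os

    r_repo = os.environ.get('local_repository_r_repo', 'http://cran.us.r-project.org')
    with open('/tmp/Rprofile.site') as f:
        profile = f.read()
    with open('/tmp/Rprofile.site', 'w') as f:
        # R_REPO must expand to a full URL, e.g. https://nexus.example.com/repository/r-repo
        f.write(profile.replace('R_REPO', r_repo))
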
diff --git a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
index 67333c3..9fdf169 100644
--- a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
+++ b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
@@ -41,10 +41,8 @@ args = parser.parse_args()
 spark_version = args.spark_version
 hadoop_version = args.hadoop_version
 jupyter_version = os.environ['notebook_jupyter_version']
-if 'local_repository_host' in os.environ:
-    scala_link = 'https://{0}/{1}/{2}/'.format(os.environ['local_repository_host'],
-                                               os.environ['local_repository_prefix'],
-                                               os.environ['local_repository_packages_repo'])
+if os.environ['local_repository_enabled'] == 'True':
+    scala_link = '{0}/'.format(os.environ['local_repository_packages_repo'])
 else:
     scala_link = "http://www.scala-lang.org/files/archive/"
 if args.region == 'cn-north-1':
@@ -53,10 +51,9 @@ if args.region == 'cn-north-1':
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/spark-" + spark_version + \
                  "-bin-hadoop" + hadoop_version + ".tgz"
-if 'local_repository_host' in os.environ:
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        os.environ['local_repository_host'], spark_version, hadoop_version, os.environ['local_repository_prefix'],
-        os.environ['local_repository_packages_repo'])
+if os.environ['local_repository_enabled'] == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        os.environ['local_repository_packages_repo'], spark_version, hadoop_version)
 
 pyspark_local_path_dir = '/home/' + args.os_user + '/.local/share/jupyter/kernels/pyspark_local/'
 py3spark_local_path_dir = '/home/' + args.os_user + '/.local/share/jupyter/kernels/py3spark_local/'
@@ -67,9 +64,8 @@ jars_dir = '/opt/jars/'
 templates_dir = '/root/templates/'
 files_dir = '/root/files/'
 local_spark_path = '/opt/spark/'
-if 'local_repository_host' in os.environ:
-    toree_link = "https://{0}/{1}/{2}/toree-0.2.0.tar.gz".format(
-        os.environ['local_repository_host'], os.environ['local_repository_prefix'],
+if os.environ['local_repository_enabled'] == 'True':
+    toree_link = "{0}/toree-0.2.0.tar.gz".format(
         os.environ['local_repository_packages_repo'])
 else:
     toree_link = 'http://archive.apache.org/dist/incubator/toree/0.2.0-incubating/toree-pip/toree-0.2.0.tar.gz'
diff --git a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
index 78f9bac..474ec12 100644
--- a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
+++ b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
@@ -48,10 +48,9 @@ if args.region == 'cn-north-1':
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/spark-" + spark_version + \
                  "-bin-hadoop" + hadoop_version + ".tgz"
-if 'local_repository_host' in os.environ:
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        os.environ['local_repository_host'], spark_version, hadoop_version, os.environ['local_repository_prefix'],
-        os.environ['local_repository_packages_repo'])
+if os.environ['local_repository_enabled'] == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        os.environ['local_repository_packages_repo'], spark_version, hadoop_version)
 local_spark_path = '/opt/spark/'
 jars_dir = '/opt/jars/'
 templates_dir = '/root/templates/'
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
index 66d5630..6856985 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_docker.py
@@ -60,9 +60,8 @@ def update_repository(dlab_path, repository_host, region):
                  'base_Dockerfile'.format(repository_host))
             sudo('sed -i "/pip install/s/jupyter/ipython==5.0.0 jupyter==1.0.0/g" base_Dockerfile')
             sudo('sed -i "22i COPY general/files/os/debian/sources.list /etc/apt/sources.list" base_Dockerfile')
-        if 'local_repository_host' in os.environ:
-            sudo('sed -i "s|^FROM ubuntu.*|FROM {0}/{1}/{2}/dlab-pre-base|g" base_Dockerfile'.format(
-                repository_host, os.environ['local_repository_prefix'],
+        if os.environ['local_repository_enabled'] == 'True':
+            sudo('sed -i "s|^FROM ubuntu.*|FROM {0}/dlab-pre-base|g" base_Dockerfile'.format(
                 os.environ['local_repository_docker_internal_repo']))
             sudo('sed -i "/pip install/d;/apt-get/d" base_Dockerfile')
             # sudo('docker login -u docker-nexus -p docker-nexus {}:8083'.format(repository_host))
@@ -82,9 +81,9 @@ def build_docker_images(image_list, region, dlab_path):
                  '/home/{1}/keys/azure_auth.json'.format(args.dlab_path, args.os_user))
         if region == 'cn-north-1':
             update_repository(dlab_path, os.environ['conf_pypi_mirror'], region)
-        if 'local_repository_host' in os.environ:
+        if os.environ['local_repository_enabled'] == 'True':
             update_repository(dlab_path, os.environ['local_repository_host'], region)
-        if 'local_repository_host' in os.environ:
+        if os.environ['local_repository_enabled'] == 'True':
             sudo('mkdir -p {}sources/infrastructure-provisioning/src/base/certs'.format(args.dlab_path))
             put('/root/certs/repository.crt', '{}sources/infrastructure-provisioning/src/base/certs/'
                                               'repository.crt'.format(args.dlab_path), use_sudo=True)
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
index e2f8bcf..ce7cf61 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
@@ -142,12 +142,10 @@ def build_ui():
             sudo('sudo chown -R {} {}/*'.format(args.os_user, args.dlab_path))
 
         # Building Back-end
-        if 'local_repository_host' in os.environ:
-            sudo('sed -i "s|BINTRAY-REPO|https://{0}/{2}/{3}/|g" '
-                 '{1}/sources/services/self-service/pom.xml'.format(os.environ['local_repository_host'],
-                                                                    args.dlab_path,
-                                                                    os.environ['local_repository_prefix'],
-                                                                    os.environ['local_repository_maven_bintray_repo']))
+        if os.environ['local_repository_enabled'] == 'True':
+            sudo('sed -i "s|BINTRAY-REPO|{0}/|g" '
+                 '{1}/sources/services/self-service/pom.xml'.format(os.environ['local_repository_maven_bintray_repo'],
+                                                                    args.dlab_path))
         else:
             sudo('sed -i "s|BINTRAY-REPO|https://dl.bintray.com/michaelklishin/maven/|g" '
                  '{}/sources/services/self-service/pom.xml'.format(args.dlab_path))
@@ -194,7 +192,7 @@ def build_ui():
             sudo('cp {0}/sources/services/billing-azure/target/billing-azure*.jar {0}/webapp/billing/lib/'.format(
                 args.dlab_path))
         elif args.cloud_provider == 'aws':
-            if 'local_repository_host' in os.environ:
+            if os.environ['local_repository_enabled'] == 'True':
                 sudo('sed -i "s|region:|region: {1}|g" {0}/sources/services/billing-aws/billing.yml'.format(
                     args.dlab_path, os.environ['aws_region']))
             sudo('cp {0}/sources/services/billing-aws/billing.yml {0}/webapp/billing/conf/'.format(args.dlab_path))
diff --git a/infrastructure-provisioning/src/ssn/templates/settings.xml b/infrastructure-provisioning/src/ssn/templates/settings.xml
index 2ccacd1..0fc3366 100644
--- a/infrastructure-provisioning/src/ssn/templates/settings.xml
+++ b/infrastructure-provisioning/src/ssn/templates/settings.xml
@@ -13,7 +13,7 @@
   <mirrors>
       <mirror>
       <id>dlab-repo</id>
-      <url>https://REPOSITORY_HOST/REPOSITORY_PREFIX/REPOSITORY_MAVEN_REPO</url>
+      <url>REPOSITORY_MAVEN_REPO</url>
       <mirrorOf>central</mirrorOf>
     </mirror>
   </mirrors>
diff --git a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
index 6f97623..9192b57 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
@@ -56,10 +56,9 @@ if args.region == 'cn-north-1':
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/spark-" + spark_version + \
                  "-bin-hadoop" + hadoop_version + ".tgz"
-if 'local_repository_host' in os.environ:
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        os.environ['local_repository_host'], spark_version, hadoop_version, os.environ['local_repository_prefix'],
-        os.environ['local_repository_packages_repo'])
+if os.environ['local_repository_enabled'] == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        os.environ['local_repository_packages_repo'], spark_version, hadoop_version)
 local_spark_path = '/opt/spark/'
 jars_dir = '/opt/jars/'
 templates_dir = '/root/templates/'
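The same flag-based override of spark_link recurs in the tensor-rstudio, tensor and zeppelin configure scripts below. As a sketch only (build_spark_link is not a function in the patch), the repeated block could be expressed once like this; the cn-north-1 mirror branch from the originals is omitted because its URL is not visible in these hunks.

    import os

    def build_spark_link(spark_version, hadoop_version):
        # Sketch consolidating the spark_link selection repeated across the
        # notebook configure scripts; not part of the commit.
        tarball = 'spark-{0}-bin-hadoop{1}.tgz'.format(spark_version, hadoop_version)
        if os.environ.get('local_repository_enabled', 'False') == 'True':
            return '{0}/{1}'.format(os.environ['local_repository_packages_repo'], tarball)
        return 'https://archive.apache.org/dist/spark/spark-{0}/{1}'.format(spark_version, tarball)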
diff --git a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
index 54e1a04..a207bd5 100644
--- a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
+++ b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
@@ -50,10 +50,9 @@ if args.region == 'cn-north-1':
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/spark-" + spark_version + \
                  "-bin-hadoop" + hadoop_version + ".tgz"
-if 'local_repository_host' in os.environ:
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        os.environ['local_repository_host'], spark_version, hadoop_version, os.environ['local_repository_prefix'],
-        os.environ['local_repository_packages_repo'])
+if os.environ['local_repository_enabled'] == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        os.environ['local_repository_packages_repo'], spark_version, hadoop_version)
 pyspark_local_path_dir = '/home/' + args.os_user + '/.local/share/jupyter/kernels/pyspark_local/'
 py3spark_local_path_dir = '/home/' + args.os_user + '/.local/share/jupyter/kernels/py3spark_local/'
 local_spark_path = '/opt/spark/'
diff --git a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
index 75f35b5..b6eada2 100644
--- a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
+++ b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
@@ -52,17 +52,14 @@ args = parser.parse_args()
 
 spark_version = args.spark_version
 hadoop_version = args.hadoop_version
-if 'local_repository_host' in os.environ:
-    scala_link = 'https://{0}/{1}/{2}/'.format(os.environ['local_repository_host'],
-                                               os.environ['local_repository_prefix'],
-                                               os.environ['local_repository_packages_repo'])
+if os.environ['local_repository_enabled'] == 'True':
+    scala_link = '{0}/'.format(os.environ['local_repository_packages_repo'])
 else:
     scala_link = "http://www.scala-lang.org/files/archive/"
 zeppelin_version = args.zeppelin_version
-if 'local_repository_host' in os.environ:
-    zeppelin_link = "https://{1}/{2}/{3}/zeppelin-{0}-bin-netinst.tgz".format(
-        zeppelin_version, os.environ['local_repository_host'], os.environ['local_repository_prefix'],
-        os.environ['local_repository_packages_repo'])
+if os.environ['local_repository_enabled'] == 'True':
+    zeppelin_link = "{1}/zeppelin-{0}-bin-netinst.tgz".format(
+        zeppelin_version, os.environ['local_repository_packages_repo'])
 else:
     zeppelin_link = "http://archive.apache.org/dist/zeppelin/zeppelin-{0}/zeppelin-{0}-bin-netinst.tgz".format(
         zeppelin_version)
@@ -72,10 +69,9 @@ if args.region == 'cn-north-1':
 else:
     spark_link = "https://archive.apache.org/dist/spark/spark-" + spark_version + "/spark-" + spark_version + \
                  "-bin-hadoop" + hadoop_version + ".tgz"
-if 'local_repository_host' in os.environ:
-    spark_link = "https://{0}/{3}/{4}/spark-{1}-bin-hadoop{2}.tgz".format(
-        os.environ['local_repository_host'], spark_version, hadoop_version, os.environ['local_repository_prefix'],
-        os.environ['local_repository_packages_repo'])
+if os.environ['local_repository_enabled'] == 'True':
+    spark_link = "{0}/spark-{1}-bin-hadoop{2}.tgz".format(
+        os.environ['local_repository_packages_repo'], spark_version, hadoop_version)
 zeppelin_interpreters = "md,python,livy,shell"
 python3_version = "3.4"
 local_spark_path = '/opt/spark/'
@@ -173,10 +169,9 @@ def configure_local_spark_kernels(args):
 
 def install_local_livy(args):
     if not exists('/home/' + args.os_user + '/.ensure_dir/local_livy_ensured'):
-        if 'local_repository_host' in os.environ:
-            sudo('wget https://{1}/{2}/{3}/livy-server-{0}.zip -O /opt/livy-server-{0}.zip'.format(
-                 args.livy_version, os.environ['local_repository_host'], os.environ['local_repository_prefix'],
-                 os.environ['local_repository_packages_repo']))
+        if os.environ['local_repository_enabled'] == 'True':
+            sudo('wget {1}/livy-server-{0}.zip -O /opt/livy-server-{0}.zip'.format(
+                 args.livy_version, os.environ['local_repository_packages_repo']))
         else:
             sudo('wget http://archive.cloudera.com/beta/livy/livy-server-{0}.zip -O /opt/livy-server-{0}.zip'.format(
                 args.livy_version))
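For the Zeppelin node the same flag also redirects the Scala, Zeppelin and Livy downloads to the packages repository. A small sketch of the resulting link selection is below, grounded in the URLs visible in the hunks above; the helper itself is illustrative and not part of the commit.

    import os

    def artifact_links(zeppelin_version, livy_version):
        # Sketch (not from the patch): the three download locations this script
        # derives from local_repository_packages_repo when the flag is 'True'.
        if os.environ.get('local_repository_enabled', 'False') == 'True':
            repo = os.environ['local_repository_packages_repo']
            return {
                'scala': '{0}/'.format(repo),
                'zeppelin': '{0}/zeppelin-{1}-bin-netinst.tgz'.format(repo, zeppelin_version),
                'livy': '{0}/livy-server-{1}.zip'.format(repo, livy_version),
            }
        return {
            'scala': 'http://www.scala-lang.org/files/archive/',
            'zeppelin': 'http://archive.apache.org/dist/zeppelin/zeppelin-{0}/zeppelin-{0}-bin-netinst.tgz'.format(zeppelin_version),
            'livy': 'http://archive.cloudera.com/beta/livy/livy-server-{0}.zip'.format(livy_version),
        }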

